In [ ]:
import numpy as np
import pandas as pd

import matplotlib as mpl
import matplotlib.pyplot as plt

from scipy.spatial.distance import cdist
from sklearn import cluster
from yellowbrick.cluster import KElbowVisualizer
from sklearn.preprocessing import normalize
from sklearn.metrics.cluster import adjusted_rand_score
from sklearn.metrics import silhouette_score
from sklearn.cluster import MeanShift, estimate_bandwidth, KMeans
from sklearn.cluster import AgglomerativeClustering
from sklearn.metrics import v_measure_score 
from sklearn.preprocessing import normalize
import scipy.cluster.hierarchy as shc



from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
from keras.preprocessing.sequence import TimeseriesGenerator
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras import layers
from keras.layers import BatchNormalization
from keras import models
from keras import optimizers
from keras import callbacks
from keras.layers import Dropout
from sklearn import metrics
from sklearn.metrics import classification_report
import math
from sklearn.metrics import mean_squared_error


%matplotlib inline
In [ ]:
from google.colab import drive
drive.mount('/content/drive')
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
In [ ]:
import tensorflow as tf

print(tf.__version__)
2.3.0
In [ ]:
df = pd.read_csv('/content/drive/My Drive/Final thesis/reshapedGC.csv', parse_dates = ['Datetime'], dayfirst = True,na_filter=False)
In [ ]:
df.head()
Out[ ]:
Datetime 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 ... 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300
0 2012-07-01 00:00:00 0.855 0.965 0.084 0.108 0.260 0.087 0.208 0.025 0.020 0.567 0.241 0.234 0.191 0.066 0.355 0.122 0.184 0.075 0.050 0.187 0.067 0.043 0.130 0.038 0.863 0.612 0.106 0.068 0.045 1.127 0.060 0.668 0.269 0.277 0.183 0.028 0.032 0.101 ... 0.584 0.490 0.783 0.061 0.100 0.000 0.761 0.082 0.725 0.252 1.050 0.151 0.055 0.251 0.131 0.081 0.081 0.060 0.094 0.791 1.096 0.313 0.055 0.075 0.048 0.147 0.788 0.520 0.726 0.091 0.221 0.075 0.183 0.134 0.075 0.030 0.141 0.157 0.541 0.219
1 2012-07-01 00:30:00 0.786 0.927 0.084 0.098 0.253 0.098 0.151 0.022 0.036 0.547 0.197 0.343 0.176 0.067 0.508 0.121 0.128 0.075 0.063 0.169 0.069 0.117 0.127 0.088 0.813 0.609 0.088 0.048 0.043 1.262 0.060 0.661 0.331 0.276 0.077 0.074 0.031 0.073 ... 0.136 0.205 0.676 0.058 0.075 0.006 1.390 0.107 0.675 0.216 0.996 0.241 0.053 0.159 0.106 0.125 0.091 0.056 0.063 0.787 0.627 0.106 0.042 0.081 0.042 0.130 0.738 0.177 0.733 0.110 0.231 0.075 0.177 0.174 0.084 0.053 0.257 0.127 0.100 0.099
2 2012-07-01 01:00:00 0.604 1.359 0.082 0.105 0.180 0.064 0.092 0.011 0.009 0.613 0.163 0.234 0.199 0.052 0.555 0.147 0.096 0.038 0.038 0.186 0.059 0.054 0.146 0.031 0.863 0.414 0.094 0.065 0.040 1.157 0.059 0.543 0.281 0.279 0.122 0.061 0.045 0.041 ... 0.108 0.200 0.683 0.122 0.087 0.094 1.450 0.090 0.701 0.185 1.013 0.181 0.092 0.298 0.119 0.154 0.093 0.097 0.106 1.057 0.691 0.119 0.026 0.116 0.790 0.203 0.675 0.439 0.750 0.065 0.247 0.063 0.193 0.165 0.054 0.044 0.197 0.122 0.090 0.134
3 2012-07-01 01:30:00 0.544 0.060 0.084 0.075 0.220 0.089 0.152 0.023 0.045 0.519 0.163 0.339 0.164 0.057 0.542 0.141 0.098 0.075 0.063 0.176 0.066 0.035 0.133 0.100 0.838 0.264 0.106 0.074 0.048 0.232 0.053 0.276 0.169 0.305 0.025 0.027 0.024 0.040 ... 0.103 0.177 0.666 0.137 0.075 0.063 1.841 0.077 0.625 0.169 0.324 0.154 0.055 0.267 0.119 0.188 0.093 0.157 0.081 0.833 0.654 0.088 0.074 0.083 1.146 0.120 0.175 0.041 0.211 0.064 0.193 0.444 0.185 0.104 0.062 0.029 0.273 0.120 0.094 0.100
4 2012-07-01 02:00:00 0.597 0.059 0.086 0.102 0.171 0.067 0.083 0.024 0.099 0.314 0.207 0.330 0.190 0.066 0.571 0.219 0.097 0.069 0.063 0.172 0.070 0.104 0.133 0.025 0.838 0.215 0.100 0.080 0.033 0.214 0.476 0.285 0.175 0.285 0.087 0.042 0.051 0.040 ... 0.121 0.173 0.668 0.089 0.094 0.006 1.074 0.024 0.651 0.157 0.170 0.154 0.050 0.199 0.119 0.135 0.073 0.065 0.106 0.838 0.204 0.137 0.026 0.095 1.049 0.165 0.188 0.039 0.159 0.198 0.141 0.081 0.176 0.130 0.074 0.044 0.206 0.106 0.046 0.123

5 rows × 301 columns

In [ ]:
 def getSeason(month):
    if (month == 6 or month == 7 or month == 8):
       return "WINTER"
    elif(month == 9 or month == 10 or month == 11):
       return "SPRING"
    elif(month == 12 or month == 1 or month == 2):
       return "SUMMER"
    else:
       return "AUTUMN"
In [ ]:
df['month'] = df['Datetime'].apply(lambda x: x.month)
In [ ]:
df['hour'] = df['Datetime'].apply(lambda x: x.hour)
df['minute'] = df['Datetime'].apply(lambda x: x.minute)
In [ ]:
df['season'] = df['month'].apply(lambda x: getSeason(x))
In [ ]:
df.season.unique()
Out[ ]:
array(['WINTER', 'SPRING', 'SUMMER', 'AUTUMN'], dtype=object)
In [ ]:
df.season.value_counts()
Out[ ]:
WINTER    4416
AUTUMN    4416
SPRING    4368
SUMMER    4320
Name: season, dtype: int64
In [ ]:
dfS = df.groupby(['hour','season']).mean()
In [ ]:
dfS.head()
Out[ ]:
1 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 ... 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 month minute
hour season
0 AUTUMN 0.216712 0.106772 0.121103 0.155516 0.265690 0.137467 0.162832 0.029255 0.044163 0.222864 0.208429 0.096114 0.389668 0.053413 0.167853 0.216375 0.133614 0.107793 0.316168 0.696375 0.120848 0.054859 0.151734 0.091060 0.496223 0.151712 0.118266 0.082255 0.029810 0.119891 0.080223 0.210859 0.189902 0.252353 0.162696 0.146761 0.078837 0.068196 0.099859 0.125386 ... 0.274168 0.043614 0.115576 0.070995 0.649522 0.089054 0.662663 0.192636 0.246283 0.125163 0.084462 0.238842 0.165473 0.116190 0.125060 0.124761 0.131283 0.232495 0.613804 0.171723 0.069016 0.096717 0.441299 0.174451 0.261620 0.083190 1.138196 0.132799 0.301016 0.054147 0.458076 0.663005 0.164886 0.055000 0.189620 0.104652 0.183054 0.509424 4.00000 15.0
SPRING 0.232247 0.098198 0.111159 0.112187 0.282522 0.115192 0.140456 0.037044 0.054110 0.176780 0.196758 0.083593 0.192984 0.087522 0.149808 0.154401 0.127011 0.103967 0.232808 0.632780 0.139126 0.101780 0.185857 0.085505 0.312126 0.421698 0.112203 0.082038 0.043747 0.244808 0.078582 0.156027 0.225533 0.246709 0.150802 0.113269 0.063165 0.073742 0.096863 0.131445 ... 0.249368 0.042357 0.136692 0.067835 0.336033 0.121824 0.666192 0.183857 0.295495 0.121692 0.080484 0.281786 0.159104 0.114885 0.121973 0.132637 0.106363 0.159330 0.602313 0.216423 0.066016 0.121692 0.256022 0.209011 0.194769 0.100379 0.625385 0.151110 0.345637 0.066956 0.171626 0.272275 0.149429 0.048500 0.242346 0.126764 0.170819 0.651396 10.00000 15.0
SUMMER 0.140489 0.103872 0.140000 0.108378 0.254717 0.133822 0.150450 0.034611 0.121333 0.329567 0.233656 0.099422 0.277500 0.074106 0.236328 0.368778 0.159283 0.109567 0.359178 0.735322 0.121306 0.106983 0.209367 0.120339 0.279767 0.246322 0.122361 0.085211 0.045611 0.111139 0.078356 0.276756 0.153572 0.379367 0.129922 0.141811 0.067183 0.081483 0.127728 0.149956 ... 0.205606 0.049711 0.151822 0.070578 0.359844 0.116667 0.705428 0.203544 0.282228 0.132083 0.089828 0.326383 0.172617 0.148456 0.153467 0.158000 0.162833 0.158617 0.536456 0.167061 0.080700 0.102372 0.472428 0.250011 0.370889 0.101411 0.984267 0.146278 0.340672 0.055922 0.383856 0.478950 0.188672 0.084578 0.233133 0.118378 0.277961 0.771394 5.10000 15.0
WINTER 0.340592 0.216043 0.102457 0.131728 0.394516 0.113005 0.145353 0.091446 0.038940 0.235179 0.207217 0.171038 0.256304 0.085158 0.447315 0.155022 0.192549 0.076690 0.581462 0.473565 0.110071 0.087935 0.216620 0.095712 0.951728 0.346864 0.108310 0.084283 0.038842 0.250859 0.080500 0.229462 0.380391 0.265745 0.279701 0.093239 0.083326 0.063636 0.087783 0.114418 ... 0.580663 0.052054 0.110005 0.075250 0.965696 0.076924 0.664359 0.173810 0.357234 0.154011 0.089212 0.286505 0.166033 0.101674 0.097815 0.112348 0.123565 0.280679 0.764413 0.269310 0.058940 0.175429 0.272880 0.224446 0.639826 0.097288 0.768147 0.138402 0.319016 0.069995 0.245592 0.517348 0.147484 0.051712 0.191723 0.118826 0.327114 0.362005 7.01087 15.0
1 AUTUMN 0.216908 0.071549 0.108435 0.150658 0.237995 0.130973 0.145918 0.026984 0.041446 0.178038 0.195076 0.076364 0.226402 0.053071 0.147728 0.211440 0.095027 0.107310 0.293837 0.511826 0.127723 0.048717 0.143130 0.075826 0.506315 0.111880 0.113342 0.080935 0.029196 0.109114 0.073489 0.173614 0.164880 0.225005 0.136652 0.129734 0.070902 0.072658 0.100397 0.125832 ... 0.209717 0.040147 0.112717 0.059864 0.886679 0.088304 0.641076 0.189147 0.182984 0.131870 0.080799 0.247348 0.163462 0.110201 0.126375 0.127973 0.133484 0.242022 0.635652 0.121397 0.060951 0.078652 0.307054 0.168245 0.217109 0.072158 0.844565 0.117076 0.212370 0.065103 0.323446 0.983489 0.126679 0.051668 0.180011 0.082582 0.154120 0.273016 4.00000 15.0

5 rows × 301 columns

In [ ]:
#dfGCwk = dfGC[dfGC.weekday == True]
df_A = df[df.season == 'AUTUMN']
df_W = df[df.season == 'WINTER']
df_SU = df[df.season == 'SUMMER']
df_SP = df[df.season == 'SPRING']
In [ ]:
df_A = df_A.drop(columns = ['season','month', 'hour', 'minute'])
df_W = df_W.drop(columns = ['season','month', 'hour', 'minute'])
df_SU = df_SU.drop(columns = ['season','month', 'hour', 'minute'])
df_SP = df_SP.drop(columns = ['season','month', 'hour', 'minute'])
In [ ]:
df_A = df_A.groupby(['Datetime']).mean()
df_W = df_W.groupby(['Datetime']).mean()
df_SU = df_SU.groupby(['Datetime']).mean()
df_SP = df_SP.groupby(['Datetime']).mean()
df_A.head()
Out[ ]:
1 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 ... 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300
Datetime
2013-03-01 00:00:00 0.110 0.051 0.099 0.007 0.262 0.070 0.156 0.0 0.018 0.150 0.229 0.122 0.144 0.041 0.186 0.177 0.077 0.119 0.313 1.168 0.443 0.017 0.175 0.113 0.475 0.489 0.119 0.051 0.043 0.094 0.040 0.170 0.169 0.207 0.077 0.035 0.041 0.046 0.113 0.048 ... 0.142 0.224 0.125 0.044 0.144 0.019 0.263 0.103 0.613 0.169 0.604 0.119 0.087 0.106 0.150 0.121 0.158 0.146 0.156 0.581 0.299 0.194 0.065 0.049 1.238 0.169 0.200 0.042 2.921 0.129 0.236 0.025 0.134 0.403 0.094 0.055 0.222 0.120 0.071 0.958
2013-03-01 00:30:00 0.213 0.049 0.089 0.006 0.325 0.180 0.095 0.0 0.040 0.152 0.243 0.101 0.106 0.055 0.189 0.258 0.088 0.094 0.188 0.708 0.071 0.053 0.155 0.063 0.250 0.310 0.113 0.081 0.016 0.087 0.038 0.137 0.200 0.162 0.248 0.074 0.053 0.043 0.058 0.070 ... 0.110 0.257 0.072 0.029 0.138 0.038 0.134 0.108 0.626 0.197 0.485 0.101 0.074 0.251 0.150 0.093 0.155 0.159 0.100 0.159 0.597 0.106 0.049 0.037 1.099 0.188 0.175 0.039 1.630 0.086 0.258 0.069 1.982 1.212 0.077 0.061 0.199 0.091 0.137 0.535
2013-03-01 01:00:00 0.109 0.048 0.098 0.006 0.244 0.113 0.151 0.0 0.029 0.250 0.209 0.092 0.089 0.036 0.146 0.271 0.086 0.100 0.213 0.678 0.075 0.009 0.137 0.088 0.263 0.759 0.113 0.094 0.042 0.099 0.028 0.181 0.150 0.176 0.047 0.113 0.026 0.042 0.130 0.103 ... 0.112 0.356 0.097 0.041 0.163 0.006 0.064 0.051 0.700 0.207 0.143 0.203 0.090 0.266 0.156 0.078 0.162 0.085 0.113 0.736 0.662 0.088 0.052 0.019 0.747 0.135 0.113 0.050 0.744 0.165 0.245 0.031 1.749 2.269 0.098 0.031 0.169 0.073 0.194 0.284
2013-03-01 01:30:00 0.180 0.049 0.102 0.007 0.246 0.142 0.099 0.0 0.035 0.165 0.165 0.091 0.110 0.049 0.157 0.246 0.074 0.125 0.213 0.238 0.074 0.048 0.129 0.088 0.188 0.117 0.106 0.068 0.017 0.111 0.027 0.177 0.144 0.211 0.053 0.067 0.053 0.057 0.097 0.139 ... 0.119 0.291 0.086 0.036 0.132 0.044 0.044 0.095 0.639 0.236 0.540 0.138 0.065 0.178 0.188 0.072 0.195 0.085 0.125 0.505 0.625 0.156 0.057 0.026 0.116 0.159 0.150 0.052 2.269 0.137 0.199 0.063 0.143 2.036 0.069 0.058 0.206 0.074 0.103 0.243
2013-03-01 02:00:00 0.141 0.049 0.102 0.006 0.277 0.145 0.163 0.0 0.041 0.194 0.181 0.092 0.083 0.050 0.136 0.309 0.074 0.119 0.200 0.172 0.074 0.022 0.141 0.075 0.225 0.105 0.113 0.086 0.033 0.100 0.034 0.208 0.144 0.187 0.082 0.088 0.044 0.095 0.058 0.138 ... 0.120 0.285 0.078 0.033 0.150 0.006 0.065 0.116 0.625 0.202 0.128 0.124 0.046 0.257 0.169 0.050 0.158 0.210 0.094 0.174 0.615 0.075 0.042 0.016 0.117 0.128 0.088 0.052 2.226 0.157 0.204 0.038 0.135 0.303 0.091 0.049 0.206 0.074 0.107 0.488

5 rows × 299 columns

In [ ]:
X_A = df_A.transpose()
X_W = df_W.transpose()
X_SU = df_SU.transpose()
X_SP = df_SP.transpose()
In [ ]:
print(X_A.shape, X_W.shape, X_SU.shape, X_SP.shape)
(299, 4416) (299, 4416) (299, 4320) (299, 4368)
In [ ]:
def elbow_method(dataframe, min_k, max_k):
    # Try all k's between min_k and max_k
    cluster_nums = range(min_k, max_k + 1)
    
    # Array of K-means algorithm objects
    k_mean_algs = [cluster.KMeans(n_clusters = k) for k in cluster_nums]
    
    # Array with the results of the K-mean algorithms
    k_mean_res = [alg.fit(dataframe) for alg in k_mean_algs]
    
    # Array with the centroids for each value of k
    centroids = [res.cluster_centers_ for res in k_mean_res]
    
    # Euclidean distance between the points of the cluster and the centroids
    distances = [cdist(dataframe, centroid, 'euclidean') for centroid in centroids]
    
    # Get the closest centroid (and the corresponding distance)
    min_indices = [np.argmin(distance, axis = 1) for distance in distances]
    min_distances = [np.min(distance, axis = 1) for distance in distances]
    
    # Calculate the average squared distance
    avg_sum_squares = [sum(dist ** 2) / df_A.shape[0] for dist in min_distances]

    # Plot the elbow graph
    fig = plt.figure(figsize=(9,5))
    ax = fig.add_subplot(111)
    ax.plot(cluster_nums, avg_sum_squares, 'b*-',color='tab:green')
    plt.grid(True)
    plt.xlabel('Number of clusters')
    plt.ylabel('Average sum of squares within cluster')
    plt.show()
    
    return (k_mean_algs, k_mean_res)
In [ ]:
k_algs, k_res = elbow_method(X_A,2,10)
In [ ]:
k_algs, k_res = elbow_method(X_W,2,10)
In [ ]:
k_algs, k_res = elbow_method(X_SU,2,10)
In [ ]:
k_algs, k_res = elbow_method(X_SP,2,10)
In [ ]:
#km_ = KMeans().fit(X_A)
#pd.Series.value_counts(km_.labels_)

#visualizer = KElbowVisualizer(km_, k=(2,10), timings=False)
#visualizer.fit(X_A)      
#visualizer.show() 
In [ ]:
#km_ = KMeans().fit(X_W)
#pd.Series.value_counts(km_.labels_)

#visualizer = KElbowVisualizer(km_, k=(2,10), timings=False)
#visualizer.fit(X_A)      
#visualizer.show() 
In [ ]:
#km_ = KMeans().fit(X_SU)
#pd.Series.value_counts(km_.labels_)
#
#visualizer = KElbowVisualizer(km_, k=(2,10), timings=False)
#visualizer.fit(X_A)      
#visualizer.show() 
In [ ]:
#km_ = KMeans().fit(X_SP)
#pd.Series.value_counts(km_.labels_)##

#visualizer = KElbowVisualizer(km_, k=(2,10), timings=False)
#visualizer.fit(X_A)      
#visualizer.show() 
In [ ]:
v_scores = [] 

N_Clusters = [2, 3, 4, 5, 6, 7, 8] 

km_2_A = KMeans(2).fit(X_A)
km_2_Av = KMeans(2).fit(df_A)
True_lables1 =km_2_Av.labels_
labels1 = km_2_Av.predict(df_A)
v_scores.append(v_measure_score(df_A['4'], labels1)) 

km_3_A = KMeans(3).fit(X_A)
km_3_Av = KMeans(3).fit(df_A)
True_lables2 =km_3_Av.labels_
labels2 = km_3_Av.predict(df_A)
v_scores.append(v_measure_score(df_A['4'], labels2)) 

km_4_A = KMeans(4).fit(X_A)
km_4_Av = KMeans(4).fit(df_A)
True_lables3 =km_4_Av.labels_
labels3 = km_4_Av.predict(df_A)
v_scores.append(v_measure_score(df_A['4'], labels3)) 

km_5_A = KMeans(5).fit(X_A)
km_5_Av = KMeans(5).fit(df_A)
True_lables4 =km_5_Av.labels_
labels4 = km_5_Av.predict(df_A)
v_scores.append(v_measure_score(df_A['4'], labels4)) 

km_6_A = KMeans(6).fit(X_A)
km_6_Av = KMeans(6).fit(df_A)
True_lables5 =km_6_Av.labels_
labels5 = km_6_Av.predict(df_A)
v_scores.append(v_measure_score(df_A['4'], labels5)) 

km_7_A = KMeans(7).fit(X_A)
km_7_Av = KMeans(7).fit(df_A)
True_lables6 =km_7_Av.labels_
labels6 = km_7_Av.predict(df_A)
v_scores.append(v_measure_score(df_A['4'], labels6)) 

km_8_A = KMeans(8).fit(X_A)
km_8_Av = KMeans(8).fit(df_A)
True_lables7 =km_8_Av.labels_
labels7 = km_8_Av.predict(df_A)
v_scores.append(v_measure_score(df_A['4'], labels7)) 


v_scores_w = [] 

N_Clusters = [2, 3, 4, 5, 6, 7, 8] 

km_2_W = KMeans(2).fit(X_W)
km_2_Wv = KMeans(2).fit(df_W)
True_lables1_w =km_2_Wv.labels_
labels1_w = km_2_Wv.predict(df_W)
v_scores_w.append(v_measure_score(df_W['4'], labels1_w)) 

km_3_W = KMeans(3).fit(X_W)
km_3_Wv = KMeans(3).fit(df_W)
True_lables2_w =km_3_Wv.labels_
labels2_w = km_3_Wv.predict(df_W)
v_scores_w.append(v_measure_score(df_W['4'], labels2_w)) 

km_4_W = KMeans(4).fit(X_W)
km_4_Wv = KMeans(4).fit(df_W)
True_lables3_w =km_4_Wv.labels_
labels3_w = km_4_Wv.predict(df_W)
v_scores_w.append(v_measure_score(df_W['4'], labels3_w)) 

km_5_W = KMeans(5).fit(X_W)
km_5_Wv = KMeans(5).fit(df_W)
True_lables4_w =km_5_Wv.labels_
labels4_w = km_5_Wv.predict(df_W)
v_scores_w.append(v_measure_score(df_W['4'], labels4_w)) 

km_6_W = KMeans(6).fit(X_W)
km_6_Wv = KMeans(6).fit(df_W)
True_lables5_w =km_6_Wv.labels_
labels5_w = km_6_Wv.predict(df_W)
v_scores_w.append(v_measure_score(df_W['4'], labels5_w)) 

km_7_W = KMeans(7).fit(X_W)
km_7_Wv = KMeans(7).fit(df_W)
True_lables6_w =km_7_Wv.labels_
labels6_w = km_7_Wv.predict(df_W)
v_scores_w.append(v_measure_score(df_W['4'], labels6_w)) 

km_8_W = KMeans(8).fit(X_W)
km_8_Wv = KMeans(8).fit(df_W)
True_lables7_w =km_8_Wv.labels_
labels7_w = km_8_Wv.predict(df_W)
v_scores_w.append(v_measure_score(df_W['4'], labels7_w)) 


v_scores_su = [] 

N_Clusters = [2, 3, 4, 5, 6, 7, 8] 

km_2_SU = KMeans(2).fit(X_SU)
km_2_SUv = KMeans(2).fit(df_SU)
True_lables1_su =km_2_SUv.labels_
labels1_su = km_2_SUv.predict(df_SU)
v_scores_su.append(v_measure_score(df_SU['4'], labels1_su)) 

km_3_SU = KMeans(3).fit(X_SU)
km_3_SUv = KMeans(3).fit(df_SU)
True_lables2_su =km_3_SUv.labels_
labels2_su = km_3_SUv.predict(df_SU)
v_scores_su.append(v_measure_score(df_SU['4'], labels2_su)) 

km_4_SU = KMeans(4).fit(X_SU)
km_4_SUv = KMeans(4).fit(df_SU)
True_lables3_su =km_4_SUv.labels_
labels3_su = km_4_SUv.predict(df_SU)
v_scores_su.append(v_measure_score(df_SU['4'], labels3_su)) 

km_5_SU = KMeans(5).fit(X_SU)
km_5_SUv = KMeans(5).fit(df_SU)
True_lables4_su =km_5_SUv.labels_
labels4_su = km_5_SUv.predict(df_SU)
v_scores_su.append(v_measure_score(df_SU['4'], labels4_su)) 

km_6_SU = KMeans(6).fit(X_SU)
km_6_SUv = KMeans(6).fit(df_SU)
True_lables5_su =km_6_SUv.labels_
labels5_su = km_6_SUv.predict(df_SU)
v_scores_su.append(v_measure_score(df_SU['4'], labels5_su)) 

km_7_SU = KMeans(7).fit(X_SU)
km_7_SUv = KMeans(7).fit(df_SU)
True_lables6_su =km_7_SUv.labels_
labels6_su = km_7_SUv.predict(df_SU)
v_scores_su.append(v_measure_score(df_SU['4'], labels6_su)) 

km_8_SU = KMeans(8).fit(X_SU)
km_8_SUv = KMeans(8).fit(df_SU)
True_lables7_su =km_8_SUv.labels_
labels7_su = km_8_SUv.predict(df_SU)
v_scores_su.append(v_measure_score(df_SU['4'], labels7_su)) 

v_scores_sp = [] 

N_Clusters = [2, 3, 4, 5, 6, 7, 8] 

km_2_SP = KMeans(2).fit(X_SP)
km_2_SPv = KMeans(2).fit(df_SP)
True_lables1_sp =km_2_SPv.labels_
labels1_sp = km_2_SPv.predict(df_SP)
v_scores_sp.append(v_measure_score(df_SP['1'], labels1_sp)) 

km_3_SP = KMeans(3).fit(X_SP)
km_3_SPv = KMeans(3).fit(df_SP)
True_lables2_sp =km_3_SPv.labels_
labels2_sp = km_3_SPv.predict(df_SP)
v_scores_sp.append(v_measure_score(df_SP['1'], labels2_sp)) 

km_4_SP = KMeans(4).fit(X_SP)
km_4_SPv = KMeans(4).fit(df_SP)
True_lables3_sp =km_4_SPv.labels_
labels3_sp = km_4_SPv.predict(df_SP)
v_scores_sp.append(v_measure_score(df_SP['1'], labels3_sp)) 

km_5_SP = KMeans(5).fit(X_SP)
km_5_SPv = KMeans(5).fit(df_SP)
True_lables4_sp =km_5_SPv.labels_
labels4_sp = km_5_SPv.predict(df_SP)
v_scores_sp.append(v_measure_score(df_SP['1'], labels4_sp)) 

km_6_SP = KMeans(6).fit(X_SP)
km_6_SPv = KMeans(6).fit(df_SP)
True_lables5_sp =km_6_SPv.labels_
labels5_sp = km_6_SPv.predict(df_SP)
v_scores_sp.append(v_measure_score(df_SP['1'], labels5_sp)) 

km_7_SP = KMeans(7).fit(X_SP)
km_7_SPv = KMeans(7).fit(df_SP)
True_lables6_sp =km_7_SPv.labels_
labels6_sp = km_7_SPv.predict(df_SP)
v_scores_sp.append(v_measure_score(df_SP['1'], labels6_sp)) 

km_8_SP = KMeans(8).fit(X_SP)
km_8_SPv = KMeans(8).fit(df_SP)
True_lables7_sp =km_8_SPv.labels_
labels7_sp = km_8_SPv.predict(df_SP)
v_scores_sp.append(v_measure_score(df_SP['1'], labels7_sp)) 
In [ ]:
#Plotting a Bar Graph to compare the models 
plt.bar(N_Clusters, v_scores) 
plt.xlabel('Number of Clusters') 
plt.ylabel('V-Measure Score') 
plt.title('Comparison of different Clustering Models') 
plt.show() 
In [ ]:
print('k = 3 the score is ',silhouette_score(X_A, km_3_A.labels_, metric = 'euclidean'),'\n',
      'k = 4 the score is ',silhouette_score(X_A, km_4_A.labels_, metric = 'euclidean'),'\n',
      'k = 5 the score is ',silhouette_score(X_A, km_5_A.labels_, metric = 'euclidean'),'\n',
      'k = 6 the score is ',silhouette_score(X_A, km_6_A.labels_, metric = 'euclidean'),'\n',
      'k = 7 the score is ',silhouette_score(X_A, km_7_A.labels_, metric = 'euclidean'),'\n',
      'k = 8 the score is ',silhouette_score(X_A, km_8_A.labels_, metric = 'euclidean'))
k = 3 the score is  0.09696423458336328 
 k = 4 the score is  0.02378857720758085 
 k = 5 the score is  0.14446423642212503 
 k = 6 the score is  0.08792512247524707 
 k = 7 the score is  0.06444484359919744 
 k = 8 the score is  0.06438325465541055
In [ ]:
#Plotting a Bar Graph to compare the models 
plt.bar(N_Clusters, v_scores_su) 
plt.xlabel('Number of Clusters') 
plt.ylabel('V-Measure Score') 
plt.title('Comparison of different Clustering Models') 
plt.show()
In [ ]:
print('k = 3 the score is ',silhouette_score(X_SU, km_3_SU.labels_, metric = 'euclidean'),'\n',
      'k = 4 the score is ',silhouette_score(X_SU, km_4_SU.labels_, metric = 'euclidean'),'\n',
      'k = 5 the score is ',silhouette_score(X_SU, km_5_SU.labels_, metric = 'euclidean'),'\n',
      'k = 6 the score is ',silhouette_score(X_SU, km_6_SU.labels_, metric = 'euclidean'),'\n',
      'k = 7 the score is ',silhouette_score(X_SU, km_7_SU.labels_, metric = 'euclidean'),'\n',
      'k = 8 the score is ',silhouette_score(X_SU, km_8_SU.labels_, metric = 'euclidean'))
k = 3 the score is  0.18208249782720262 
 k = 4 the score is  0.17854291902801267 
 k = 5 the score is  0.09046847657042151 
 k = 6 the score is  0.11798283672474962 
 k = 7 the score is  0.06431312980381781 
 k = 8 the score is  0.08528571071108727
In [ ]:
#Plotting a Bar Graph to compare the models 
plt.bar(N_Clusters, v_scores_w) 
plt.xlabel('Number of Clusters') 
plt.ylabel('V-Measure Score') 
plt.title('Comparison of different Clustering Models') 
plt.show() 
In [ ]:
print('k = 3 the score is ',silhouette_score(X_W, km_3_W.labels_, metric = 'euclidean'),'\n',
      'k = 4 the score is ',silhouette_score(X_W, km_4_W.labels_, metric = 'euclidean'),'\n',
      'k = 5 the score is ',silhouette_score(X_W, km_5_W.labels_, metric = 'euclidean'),'\n',
      'k = 6 the score is ',silhouette_score(X_W, km_6_W.labels_, metric = 'euclidean'),'\n',
      'k = 7 the score is ',silhouette_score(X_W, km_7_W.labels_, metric = 'euclidean'),'\n',
      'k = 8 the score is ',silhouette_score(X_W, km_8_W.labels_, metric = 'euclidean'))
k = 3 the score is  0.09795699749863168 
 k = 4 the score is  0.09820749479553509 
 k = 5 the score is  0.1419920320640972 
 k = 6 the score is  0.10573179136985347 
 k = 7 the score is  0.021534598121551306 
 k = 8 the score is  0.0912888822861899
In [ ]:
#Plotting a Bar Graph to compare the models 
plt.bar(N_Clusters, v_scores_sp) 
plt.xlabel('Number of Clusters') 
plt.ylabel('V-Measure Score') 
plt.title('Comparison of different Clustering Models') 
plt.show() 
In [ ]:
print('k = 3 the score is ',silhouette_score(X_SP, km_3_SP.labels_, metric = 'euclidean'),'\n',
      'k = 4 the score is ',silhouette_score(X_SP, km_4_SP.labels_, metric = 'euclidean'),'\n',
      'k = 5 the score is ',silhouette_score(X_SP, km_5_SP.labels_, metric = 'euclidean'),'\n',
      'k = 6 the score is ',silhouette_score(X_SP, km_6_SP.labels_, metric = 'euclidean'),'\n',
      'k = 7 the score is ',silhouette_score(X_SP, km_7_SP.labels_, metric = 'euclidean'),'\n',
      'k = 8 the score is ',silhouette_score(X_SP, km_8_SP.labels_, metric = 'euclidean'))
k = 3 the score is  0.1148693715551911 
 k = 4 the score is  0.1205823089913749 
 k = 5 the score is  0.12967976403118317 
 k = 6 the score is  0.08085608528226887 
 k = 7 the score is  0.06712110183898591 
 k = 8 the score is  0.08943867565694505
In [ ]:
plt.plot(X_A.columns, km_4_A.cluster_centers_[0], alpha=0.7, lw=1, color = 'blue',linestyle='dotted')
plt.gca()
plt.tight_layout()
#plt.legend()
plt.xlabel('Timeline') 
plt.ylabel('Energy Consumption (kW)') 
plt.title('Cluster 0') 
Out[ ]:
Text(0.5, 1.0, 'Cluster 0')
In [ ]:
plt.plot(X_A.columns, km_4_A.cluster_centers_[1], alpha=0.7, lw=1, color = 'green',linestyle='dotted')
plt.gca()
plt.tight_layout()
#plt.legend()
plt.xlabel('Timeline') 
plt.ylabel('Energy Consumption (kW)') 
plt.title('Cluster 1') 
Out[ ]:
Text(0.5, 1.0, 'Cluster 1')
In [ ]:
plt.plot(X_A.columns, km_4_A.cluster_centers_[2], alpha=0.7, lw=1, color = 'red',linestyle='dotted')
plt.gca()
plt.tight_layout()
#plt.legend()
plt.xlabel('Timeline') 
plt.ylabel('Energy Consumption (kW)') 
plt.title('Cluster 2') 
Out[ ]:
Text(0.5, 1.0, 'Cluster 2')
In [ ]:
plt.plot(X_A.columns, km_4_A.cluster_centers_[3], alpha=0.7, lw=1, color = 'orange',linestyle='dotted')
plt.gca()
plt.tight_layout()
#plt.legend()
plt.xlabel('Timeline') 
plt.ylabel('Energy Consumption (kW)') 
plt.title('Cluster 3') 
Out[ ]:
Text(0.5, 1.0, 'Cluster 3')
In [ ]:
#plt.figure(figsize=(5,5))
plt.plot(X_A.columns, km_4_A.cluster_centers_[1], alpha=0.8, lw=2, color = 'green')
plt.plot(X_A.columns, km_4_A.cluster_centers_[2], alpha=0.8, lw=2, color = 'red')
plt.plot(X_A.columns, km_4_A.cluster_centers_[3], alpha=0.8, lw=1, color = 'orange')
plt.plot(X_A.columns, km_4_A.cluster_centers_[0], alpha=0.8, lw=1, color = 'blue')
plt.gca()
plt.tight_layout()
#plt.legend()
plt.xlabel('Timeline') 
plt.ylabel('Energy Consumption (kW)') 
plt.title('All Clusters overlapping') 
Out[ ]:
Text(0.5, 1.0, 'All Clusters overlapping')
In [ ]:
plt.figure(figsize=(5,5))

plt.plot(X_W.columns, km_4_W.cluster_centers_[0], alpha=0.8, lw=1, color = 'blue')
plt.plot(X_W.columns, km_4_W.cluster_centers_[1], alpha=0.8, lw=2, color = 'green')
plt.plot(X_W.columns, km_4_W.cluster_centers_[2], alpha=0.8, lw=2, color = 'red')
plt.plot(X_W.columns, km_4_W.cluster_centers_[3], alpha=0.8, lw=1, color = 'orange')


plt.gca()
plt.tight_layout()
#plt.legend()
plt.xlabel('Timeline') 
plt.ylabel('Energy Consumption (kW)') 
plt.title('All Clusters overlapping') 
Out[ ]:
Text(0.5, 1.0, 'All Clusters overlapping')
In [ ]:
plt.figure(figsize=(10,7))

plt.plot(X_SU.columns, km_4_SU.cluster_centers_[3], alpha=0.8, lw=1, color = 'orange')
plt.plot(X_SU.columns, km_4_SU.cluster_centers_[1], alpha=0.8, lw=2, color = 'green')
plt.plot(X_SU.columns, km_4_SU.cluster_centers_[2], alpha=0.8, lw=2, color = 'red')
plt.plot(X_SU.columns, km_4_SU.cluster_centers_[0], alpha=0.8, lw=1, color = 'blue')


plt.gca()
plt.tight_layout()
#plt.legend()
plt.xlabel('Timeline') 
plt.ylabel('Energy Consumption (kW)') 
plt.title('All Clusters overlapping') 
Out[ ]:
Text(0.5, 1.0, 'All Clusters overlapping')
In [ ]:
plt.figure(figsize=(10,7))

plt.plot(X_SP.columns, km_4_SP.cluster_centers_[1], alpha=0.8, lw=2, color = 'green')

plt.plot(X_SP.columns, km_4_SP.cluster_centers_[2], alpha=0.8, lw=2, color = 'red')

plt.plot(X_SP.columns, km_4_SP.cluster_centers_[0], alpha=0.8, lw=1, color = 'blue')
plt.plot(X_SP.columns, km_4_SP.cluster_centers_[3], alpha=0.8, lw=1, color = 'orange')

plt.gca()
plt.tight_layout()
#plt.legend()
plt.xlabel('Timeline') 
plt.ylabel('Energy Consumption (kW)') 
plt.title('All Clusters overlapping') 
Out[ ]:
Text(0.5, 1.0, 'All Clusters overlapping')
In [ ]:
%%time
hc_A = AgglomerativeClustering(n_clusters = 4, affinity = 'euclidean', linkage ='ward')
X_hc_A=hc_A.fit(X_A)
pd.Series.value_counts(X_hc_A.labels_)
CPU times: user 144 ms, sys: 752 µs, total: 145 ms
Wall time: 154 ms
In [ ]:
estimate_bandwidth(X_A.values, quantile=0.5)
Out[ ]:
25.690372807766337
In [ ]:
ms_A = MeanShift(bandwidth=39, cluster_all=True).fit(X_A)
pd.Series.value_counts(ms_A.labels_)
Out[ ]:
0    296
3      1
2      1
1      1
dtype: int64
In [ ]:
#km_4_W = KMeans(4).fit(X_W)
#pd.Series.value_counts(km_4_W.labels_)
In [ ]:
km_4_A = KMeans(4).fit(X_A)
pd.Series.value_counts(km_4_A.labels_)
Out[ ]:
1    152
0    105
2     37
3      5
dtype: int64
In [ ]:
X_A['cluster'] = km_4_A.labels_

#X_SU['cluster'] = km_4_SU.labels_

#X_W['cluster'] = km_4_W.labels_

#X_SP['cluster'] = km_4_SP.labels_
In [ ]:
X_A_C1 = X_A[X_A.cluster == 0]
X_A_C2 = X_A[X_A.cluster == 1]
X_A_C3 = X_A[X_A.cluster == 2]
X_A_C4 = X_A[X_A.cluster == 3]
#X_A_C5 = X_A[X_A.cluster == 4]
#X_A_C6 = X_A[X_A.cluster == 5]
In [ ]:
X_A_C1 = X_A_C1.drop(columns = 'cluster')
X_A_C1 = X_A_C1.transpose()
X_A_C1.head()
Out[ ]:
3 4 7 9 10 13 15 18 19 22 23 24 28 29 30 31 36 37 38 39 40 41 43 44 45 48 49 50 53 60 64 66 72 73 77 83 87 88 90 92 ... 208 211 213 214 216 218 219 221 223 224 226 227 233 235 237 239 240 241 245 247 252 259 261 263 264 265 266 268 272 277 278 282 283 284 288 292 295 296 297 298
Datetime
2013-03-01 00:00:00 0.051 0.099 0.070 0.0 0.018 0.122 0.041 0.077 0.119 0.443 0.017 0.175 0.119 0.051 0.043 0.094 0.077 0.035 0.041 0.046 0.113 0.048 0.095 0.104 0.076 0.175 0.136 0.050 0.096 0.092 0.050 0.063 0.038 0.134 0.071 0.017 0.135 0.128 0.104 0.078 ... 0.105 0.138 0.227 0.113 0.268 0.046 0.318 0.113 0.061 0.038 0.088 0.070 0.121 0.061 0.094 0.099 0.047 0.093 0.078 0.310 0.150 0.059 0.142 0.125 0.044 0.144 0.019 0.103 0.119 0.158 0.146 0.194 0.065 0.049 0.042 0.025 0.094 0.055 0.222 0.120
2013-03-01 00:30:00 0.049 0.089 0.180 0.0 0.040 0.101 0.055 0.088 0.094 0.071 0.053 0.155 0.113 0.081 0.016 0.087 0.248 0.074 0.053 0.043 0.058 0.070 0.119 0.184 0.123 0.163 0.167 0.075 0.105 0.082 0.107 0.075 0.038 0.137 0.083 0.039 0.141 0.082 0.084 0.188 ... 0.072 0.146 0.212 0.113 0.220 0.097 0.071 0.108 0.029 0.006 0.038 0.071 0.113 0.026 0.288 0.096 0.046 0.089 0.079 0.354 0.121 0.046 0.110 0.072 0.029 0.138 0.038 0.108 0.101 0.155 0.159 0.106 0.049 0.037 0.039 0.069 0.077 0.061 0.199 0.091
2013-03-01 01:00:00 0.048 0.098 0.113 0.0 0.029 0.092 0.036 0.086 0.100 0.075 0.009 0.137 0.113 0.094 0.042 0.099 0.047 0.113 0.026 0.042 0.130 0.103 0.126 0.104 0.118 0.100 0.225 0.081 0.104 0.060 0.052 0.088 0.025 0.147 0.086 0.030 0.129 0.118 0.117 0.095 ... 0.094 0.185 0.212 0.113 0.246 0.031 0.063 0.099 0.054 0.019 0.044 0.068 0.061 0.051 0.106 0.116 0.068 0.055 0.062 0.429 0.159 0.075 0.112 0.097 0.041 0.163 0.006 0.051 0.203 0.162 0.085 0.088 0.052 0.019 0.050 0.031 0.098 0.031 0.169 0.073
2013-03-01 01:30:00 0.049 0.102 0.142 0.0 0.035 0.091 0.049 0.074 0.125 0.074 0.048 0.129 0.106 0.068 0.017 0.111 0.053 0.067 0.053 0.057 0.097 0.139 0.102 0.179 0.119 0.144 0.179 0.025 0.074 0.067 0.106 0.075 0.050 0.159 0.074 0.026 0.143 0.087 0.076 0.171 ... 0.102 0.173 0.213 0.113 0.228 0.104 0.080 0.107 0.047 0.019 0.081 0.047 0.060 0.036 0.188 0.111 0.152 0.091 0.065 0.417 0.105 0.047 0.119 0.086 0.036 0.132 0.044 0.095 0.138 0.195 0.085 0.156 0.057 0.026 0.052 0.063 0.069 0.058 0.206 0.074
2013-03-01 02:00:00 0.049 0.102 0.145 0.0 0.041 0.092 0.050 0.074 0.119 0.074 0.022 0.141 0.113 0.086 0.033 0.100 0.082 0.088 0.044 0.095 0.058 0.138 0.118 0.091 0.143 0.175 0.198 0.044 0.067 0.081 0.059 0.088 0.019 0.136 0.082 0.040 0.135 0.097 0.161 0.105 ... 0.083 0.166 0.209 0.138 0.246 0.057 0.054 0.116 0.029 0.006 0.088 0.065 0.091 0.049 0.181 0.085 0.145 0.137 0.069 0.426 0.167 0.053 0.120 0.078 0.033 0.150 0.006 0.116 0.124 0.158 0.210 0.075 0.042 0.016 0.052 0.038 0.091 0.049 0.206 0.074

5 rows × 133 columns

In [ ]:
X_A_C1 = X_A_C1.values
cap = np.percentile(X_A_C1, 97)   
X_A_C1[X_A_C1 > cap] = cap
###########
#scaler = MinMaxScaler(feature_range=(0, 1))
#Xtrain = scaler.fit(Xtrain.reshape(0, 1))
#X_A_C1 = scaler.fit_transform(X_A_C1)
In [ ]:
training_size=int(X_A_C1.shape[0]*0.80)

test_size=(X_A_C1.shape[0])-training_size

train,test=X_A_C1[0:training_size],X_A_C1[training_size:(X_A_C1.shape[0])]
In [ ]:
def get_batches(data, input_interval, target_interval, output_step_offset):
  batched_data_x = []
  batched_data_y = []
  for i in range(input_interval, (len(data) - target_interval - output_step_offset)):
    batched_data_x.append(data[i-input_interval:i])
    batched_data_y.append(data[i+output_step_offset:i+target_interval+output_step_offset])
  batched_data_x, batched_data_y = np.array(batched_data_x), np.array(batched_data_y)
  return batched_data_x, batched_data_y
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3388, 48, 133) (3388, 48, 133)
(740, 48, 133) (740, 48, 133)
In [ ]:
                             ###Building a sequential network:
Model_1 = models.Sequential()
Model_1.add(layers.Dense(500, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_1.add(Dropout(.2))
Model_1.add(BatchNormalization())

Model_1.add(Dense(300))
Model_1.add(Dropout(.2))
Model_1.add(BatchNormalization())

Model_1.add(layers.Dense(200, activation='relu'))
Model_1.add(Dropout(.2))
Model_1.add(BatchNormalization())

Model_1.add((Dense(trainX.shape[2])))
Model_1.compile(optimizer=  'adam', loss='mse', metrics=['mae'])
Model_1.summary()
Model: "sequential_3"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_7 (Dense)              (None, 48, 500)           67000     
_________________________________________________________________
dropout_5 (Dropout)          (None, 48, 500)           0         
_________________________________________________________________
batch_normalization_5 (Batch (None, 48, 500)           2000      
_________________________________________________________________
dense_8 (Dense)              (None, 48, 300)           150300    
_________________________________________________________________
dropout_6 (Dropout)          (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_6 (Batch (None, 48, 300)           1200      
_________________________________________________________________
dense_9 (Dense)              (None, 48, 200)           60200     
_________________________________________________________________
dropout_7 (Dropout)          (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_7 (Batch (None, 48, 200)           800       
_________________________________________________________________
dense_10 (Dense)             (None, 48, 133)           26733     
=================================================================
Total params: 308,233
Trainable params: 306,233
Non-trainable params: 2,000
_________________________________________________________________
In [ ]:
model_train = Model_1.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
48/48 [==============================] - 1s 25ms/step - loss: 0.4266 - mae: 0.4518 - val_loss: 0.0282 - val_mae: 0.1098
Epoch 2/30
48/48 [==============================] - 1s 19ms/step - loss: 0.1415 - mae: 0.2373 - val_loss: 0.0210 - val_mae: 0.0996
Epoch 3/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0815 - mae: 0.1760 - val_loss: 0.0209 - val_mae: 0.1000
Epoch 4/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0498 - mae: 0.1379 - val_loss: 0.0207 - val_mae: 0.0995
Epoch 5/30
48/48 [==============================] - 1s 17ms/step - loss: 0.0331 - mae: 0.1139 - val_loss: 0.0204 - val_mae: 0.0982
Epoch 6/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0242 - mae: 0.0992 - val_loss: 0.0201 - val_mae: 0.0966
Epoch 7/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0195 - mae: 0.0900 - val_loss: 0.0197 - val_mae: 0.0948
Epoch 8/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0170 - mae: 0.0845 - val_loss: 0.0194 - val_mae: 0.0920
Epoch 9/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0156 - mae: 0.0810 - val_loss: 0.0191 - val_mae: 0.0908
Epoch 10/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0148 - mae: 0.0790 - val_loss: 0.0188 - val_mae: 0.0894
Epoch 11/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0143 - mae: 0.0777 - val_loss: 0.0185 - val_mae: 0.0871
Epoch 12/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0140 - mae: 0.0768 - val_loss: 0.0183 - val_mae: 0.0861
Epoch 13/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0139 - mae: 0.0763 - val_loss: 0.0183 - val_mae: 0.0864
Epoch 14/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0137 - mae: 0.0758 - val_loss: 0.0178 - val_mae: 0.0844
Epoch 15/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0135 - mae: 0.0754 - val_loss: 0.0177 - val_mae: 0.0849
Epoch 16/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0134 - mae: 0.0750 - val_loss: 0.0176 - val_mae: 0.0853
Epoch 17/30
48/48 [==============================] - 1s 17ms/step - loss: 0.0133 - mae: 0.0747 - val_loss: 0.0173 - val_mae: 0.0845
Epoch 18/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0132 - mae: 0.0743 - val_loss: 0.0170 - val_mae: 0.0838
Epoch 19/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0130 - mae: 0.0742 - val_loss: 0.0171 - val_mae: 0.0847
Epoch 20/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0129 - mae: 0.0738 - val_loss: 0.0172 - val_mae: 0.0848
Epoch 21/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0128 - mae: 0.0735 - val_loss: 0.0170 - val_mae: 0.0844
Epoch 22/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0127 - mae: 0.0733 - val_loss: 0.0169 - val_mae: 0.0847
Epoch 23/30
48/48 [==============================] - 1s 19ms/step - loss: 0.0126 - mae: 0.0730 - val_loss: 0.0171 - val_mae: 0.0847
Epoch 24/30
48/48 [==============================] - 1s 19ms/step - loss: 0.0125 - mae: 0.0727 - val_loss: 0.0169 - val_mae: 0.0845
Epoch 25/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0124 - mae: 0.0725 - val_loss: 0.0171 - val_mae: 0.0849
Epoch 26/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0123 - mae: 0.0723 - val_loss: 0.0170 - val_mae: 0.0850
Epoch 27/30
48/48 [==============================] - 1s 18ms/step - loss: 0.0122 - mae: 0.0720 - val_loss: 0.0169 - val_mae: 0.0845
Epoch 28/30
48/48 [==============================] - 1s 17ms/step - loss: 0.0121 - mae: 0.0718 - val_loss: 0.0170 - val_mae: 0.0848
Epoch 29/30
48/48 [==============================] - 1s 17ms/step - loss: 0.0120 - mae: 0.0715 - val_loss: 0.0171 - val_mae: 0.0853
Epoch 30/30
48/48 [==============================] - 1s 17ms/step - loss: 0.0119 - mae: 0.0714 - val_loss: 0.0171 - val_mae: 0.0851
In [ ]:
Seq_train = Model_1.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_1.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 1s 5ms/step
[[0.07385854 0.13282737 0.13363612 ... 0.0390663  0.20247729 0.11257686]
 [0.06825309 0.12202187 0.12432019 ... 0.02741822 0.18772203 0.09909105]
 [0.0647525  0.11881603 0.12810741 ... 0.04249258 0.18975273 0.09140769]
 ...
 [0.08167756 0.18882366 0.13921122 ... 0.0825159  0.19430986 0.16113308]
 [0.09073979 0.16626406 0.13552427 ... 0.05323245 0.1757489  0.15731743]
 [0.07594451 0.15329191 0.13489068 ... 0.06932415 0.18274578 0.12504129]] (3388, 48, 133)
24/24 [==============================] - 0s 5ms/step
[[0.07132924 0.16776034 0.16906518 ... 0.20779836 0.1868708  0.13590021]
 [0.06438522 0.15860535 0.15624206 ... 0.17816761 0.19703434 0.10587574]
 [0.08111624 0.18019307 0.18143255 ... 0.20462942 0.19867897 0.11227204]
 ...
 [0.06461367 0.13575366 0.18213049 ... 0.22405696 0.2098509  0.16216514]
 [0.06079008 0.12094098 0.16507201 ... 0.17546034 0.2129781  0.11857423]
 [0.06166706 0.13457723 0.15796    ... 0.17687693 0.21280773 0.12307512]] (740, 48, 133)
In [ ]:
def rmse(actual, pred):
    return np.sqrt(((pred - actual) ** 2).mean())
def mae(actual, pred):
    return np.mean(np.abs(actual - pred))
In [ ]:
testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))

testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Test Score: 0.11 MAE
Test Score: 0.17 RMSE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: 0.001 * (0.80 ** epoch))
In [ ]:
model = Sequential()
model.add(LSTM(300, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
model.add(Dropout(.1))


#model.add(LSTM(100, activation='relu', return_sequences=True))
#model.add(Dropout(.2))


#model.add(Dropout(.01))
#model.add(LSTM(50, activation='relu', return_sequences = True))
#model.add(Dropout(.01))

model.add(Dense(trainX.shape[2]))
#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_2 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_5"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_2 (LSTM)                (None, 48, 300)           520800    
_________________________________________________________________
dropout_10 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
dense_12 (Dense)             (None, 48, 133)           40033     
=================================================================
Total params: 560,833
Trainable params: 560,833
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_A_C1 = model.fit(trainX,trainY, epochs=30, validation_split = 0.05,batch_size = 64, callbacks=[lr_decay])
Epoch 1/30
51/51 [==============================] - 7s 137ms/step - loss: 0.0193 - mae: 0.0932 - val_loss: 0.0196 - val_mae: 0.0915
Epoch 2/30
51/51 [==============================] - 7s 129ms/step - loss: 0.0150 - mae: 0.0805 - val_loss: 0.0184 - val_mae: 0.0879
Epoch 3/30
51/51 [==============================] - 7s 128ms/step - loss: 0.0141 - mae: 0.0775 - val_loss: 0.0181 - val_mae: 0.0872
Epoch 4/30
51/51 [==============================] - 7s 131ms/step - loss: 0.0136 - mae: 0.0761 - val_loss: 0.0178 - val_mae: 0.0869
Epoch 5/30
51/51 [==============================] - 6s 127ms/step - loss: 0.0133 - mae: 0.0752 - val_loss: 0.0178 - val_mae: 0.0866
Epoch 6/30
51/51 [==============================] - 7s 131ms/step - loss: 0.0131 - mae: 0.0745 - val_loss: 0.0177 - val_mae: 0.0867
Epoch 7/30
51/51 [==============================] - 7s 129ms/step - loss: 0.0129 - mae: 0.0740 - val_loss: 0.0177 - val_mae: 0.0867
Epoch 8/30
51/51 [==============================] - 7s 135ms/step - loss: 0.0128 - mae: 0.0736 - val_loss: 0.0176 - val_mae: 0.0867
Epoch 9/30
51/51 [==============================] - 6s 126ms/step - loss: 0.0127 - mae: 0.0733 - val_loss: 0.0176 - val_mae: 0.0862
Epoch 10/30
51/51 [==============================] - 6s 126ms/step - loss: 0.0126 - mae: 0.0731 - val_loss: 0.0176 - val_mae: 0.0864
Epoch 11/30
51/51 [==============================] - 6s 127ms/step - loss: 0.0125 - mae: 0.0729 - val_loss: 0.0176 - val_mae: 0.0863
Epoch 12/30
51/51 [==============================] - 7s 129ms/step - loss: 0.0125 - mae: 0.0727 - val_loss: 0.0176 - val_mae: 0.0864
Epoch 13/30
51/51 [==============================] - 7s 132ms/step - loss: 0.0124 - mae: 0.0726 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 14/30
51/51 [==============================] - 7s 130ms/step - loss: 0.0124 - mae: 0.0725 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 15/30
51/51 [==============================] - 7s 131ms/step - loss: 0.0124 - mae: 0.0724 - val_loss: 0.0176 - val_mae: 0.0864
Epoch 16/30
51/51 [==============================] - 7s 132ms/step - loss: 0.0123 - mae: 0.0724 - val_loss: 0.0176 - val_mae: 0.0864
Epoch 17/30
51/51 [==============================] - 7s 130ms/step - loss: 0.0123 - mae: 0.0723 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 18/30
51/51 [==============================] - 7s 129ms/step - loss: 0.0123 - mae: 0.0723 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 19/30
51/51 [==============================] - 7s 133ms/step - loss: 0.0123 - mae: 0.0722 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 20/30
51/51 [==============================] - 7s 129ms/step - loss: 0.0123 - mae: 0.0722 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 21/30
51/51 [==============================] - 7s 132ms/step - loss: 0.0123 - mae: 0.0722 - val_loss: 0.0176 - val_mae: 0.0864
Epoch 22/30
51/51 [==============================] - 7s 129ms/step - loss: 0.0123 - mae: 0.0722 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 23/30
51/51 [==============================] - 7s 129ms/step - loss: 0.0123 - mae: 0.0722 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 24/30
51/51 [==============================] - 7s 132ms/step - loss: 0.0123 - mae: 0.0721 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 25/30
51/51 [==============================] - 6s 127ms/step - loss: 0.0123 - mae: 0.0721 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 26/30
51/51 [==============================] - 6s 126ms/step - loss: 0.0123 - mae: 0.0721 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 27/30
51/51 [==============================] - 7s 131ms/step - loss: 0.0123 - mae: 0.0721 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 28/30
51/51 [==============================] - 7s 130ms/step - loss: 0.0123 - mae: 0.0721 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 29/30
51/51 [==============================] - 7s 128ms/step - loss: 0.0122 - mae: 0.0721 - val_loss: 0.0176 - val_mae: 0.0865
Epoch 30/30
51/51 [==============================] - 7s 133ms/step - loss: 0.0123 - mae: 0.0721 - val_loss: 0.0176 - val_mae: 0.0865
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_A_C1.history['loss'], label='train')
plt.plot(history_A_C1.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
#fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

#plt.plot(history_A_C1.history['mae'], label='train')
#plt.plot(history_A_C1.history['val_mae'], label='val')
#plt.ylabel('Pérdida')
#plt.xlabel('Epoch')
#plt.legend()
#plt.show()
In [ ]:
testingtrain_C1 = model.predict(trainX, verbose = 1)
print(testingtrain_C1[0], testingtrain_C1.shape)

testingtest_C1 = model.predict(testX, verbose = 1)
print(testingtest_C1[0], testingtest_C1.shape)
106/106 [==============================] - 2s 15ms/step
[[0.05725782 0.11818758 0.11772346 ... 0.08905089 0.13943034 0.08428405]
 [0.06344792 0.11983794 0.11368144 ... 0.08984522 0.14352766 0.10086548]
 [0.06752517 0.12275964 0.11414133 ... 0.08149987 0.1486308  0.10766749]
 ...
 [0.10935073 0.19578294 0.1649664  ... 0.08750921 0.18225256 0.18814467]
 [0.10306565 0.19269593 0.14504361 ... 0.06304262 0.17499185 0.17840864]
 [0.10031977 0.19058631 0.13143794 ... 0.04658211 0.17475724 0.1600561 ]] (3388, 48, 133)
24/24 [==============================] - 0s 14ms/step
[[0.05826722 0.13814816 0.10573946 ... 0.15756267 0.14471398 0.10729596]
 [0.06103129 0.15873775 0.11369194 ... 0.18236315 0.1783177  0.11937745]
 [0.0676216  0.18019646 0.10811441 ... 0.20454434 0.17782792 0.12819728]
 ...
 [0.04421716 0.13796182 0.10961401 ... 0.20304789 0.20206353 0.1816512 ]
 [0.04545278 0.15034427 0.12377623 ... 0.18755022 0.215868   0.1836374 ]
 [0.0430056  0.15362725 0.13282771 ... 0.16675903 0.22294644 0.17023975]] (740, 48, 133)
In [ ]:
def rmse(actual, pred):
    return np.sqrt(((pred - actual) ** 2).mean())
def mae(actual, pred):
    return np.mean(np.abs(actual - pred))
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Train Score: 0.12 RMSE
Test Score: 0.17 RMSE
In [ ]:
trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.08 MAE
Test Score: 0.11 MAE
In [ ]:
X_A_C2 = X_A_C2.drop(columns = 'cluster')
X_A_C2 = X_A_C2.transpose()
X_A_C2.head()
Out[ ]:
5 6 8 11 12 14 16 20 25 27 32 34 42 47 51 54 55 57 59 61 63 65 67 69 70 71 74 76 78 79 80 86 95 96 100 102 106 107 111 112 ... 201 203 207 209 210 215 217 220 222 225 229 230 231 232 234 236 238 242 243 244 249 250 254 257 258 260 262 270 271 273 274 275 276 279 280 286 287 290 291 293
Datetime
2013-03-01 00:00:00 0.007 0.262 0.156 0.150 0.229 0.144 0.186 0.313 0.113 0.489 0.040 0.169 0.344 0.127 0.112 0.147 0.456 0.138 0.106 0.305 0.169 0.272 0.638 0.399 0.203 0.111 0.273 0.206 0.250 0.125 0.038 0.150 0.106 0.187 0.081 0.170 0.088 0.252 0.072 0.096 ... 0.132 0.148 0.510 0.085 0.183 0.121 0.134 0.069 0.232 0.085 0.079 0.319 0.125 0.075 0.300 0.119 0.300 0.121 0.182 0.188 0.223 0.398 0.439 0.141 0.175 0.0 0.224 0.169 0.604 0.087 0.106 0.150 0.121 0.156 0.581 0.169 0.200 0.129 0.236 0.134
2013-03-01 00:30:00 0.006 0.325 0.095 0.152 0.243 0.106 0.189 0.188 0.063 0.310 0.038 0.200 0.281 0.085 0.136 0.204 0.516 0.138 0.139 0.141 0.238 0.227 0.376 0.285 0.314 0.076 0.177 0.175 0.288 0.125 0.050 0.138 2.557 0.158 0.250 0.147 0.103 0.225 0.070 0.100 ... 0.131 0.155 1.422 0.086 0.144 0.075 0.062 0.060 0.273 0.142 0.108 0.200 0.171 0.044 0.275 0.119 0.250 0.124 0.179 0.163 0.229 0.263 0.148 0.155 0.263 0.0 0.257 0.197 0.485 0.074 0.251 0.150 0.093 0.100 0.159 0.188 0.175 0.086 0.258 1.982
2013-03-01 01:00:00 0.006 0.244 0.151 0.250 0.209 0.089 0.146 0.213 0.088 0.759 0.028 0.150 0.388 0.117 0.148 0.235 0.429 0.138 0.109 0.157 0.213 0.265 0.224 0.239 0.251 0.064 0.141 0.206 0.269 0.100 0.031 0.150 0.381 0.171 0.313 0.161 0.105 0.234 0.099 0.044 ... 0.129 0.131 0.945 0.081 0.216 0.108 0.083 0.084 0.221 0.080 0.148 0.219 0.137 0.075 0.168 0.119 0.219 0.108 0.176 0.138 0.229 0.256 0.129 0.141 0.138 0.0 0.356 0.207 0.143 0.090 0.266 0.156 0.078 0.113 0.736 0.135 0.113 0.165 0.245 1.749
2013-03-01 01:30:00 0.007 0.246 0.099 0.165 0.165 0.110 0.157 0.213 0.088 0.117 0.027 0.144 0.275 0.090 0.140 0.189 0.253 0.188 0.133 0.147 0.256 0.229 0.216 0.147 0.224 0.112 0.067 0.150 0.250 0.119 0.056 0.113 0.119 0.201 0.188 0.157 0.102 0.244 0.061 0.113 ... 0.143 0.115 0.243 0.097 0.171 0.089 0.141 0.061 0.185 0.143 0.118 0.156 0.148 0.038 0.195 0.106 0.219 0.082 0.175 0.125 0.181 0.258 0.196 0.159 0.064 0.0 0.291 0.236 0.540 0.065 0.178 0.188 0.072 0.125 0.505 0.159 0.150 0.137 0.199 0.143
2013-03-01 02:00:00 0.006 0.277 0.163 0.194 0.181 0.083 0.136 0.200 0.075 0.105 0.034 0.144 0.306 0.092 0.111 0.301 0.322 0.188 0.111 0.120 0.119 0.252 0.302 0.226 0.282 0.089 0.061 0.206 0.300 0.100 0.031 0.138 0.119 0.163 0.394 0.158 0.121 0.214 0.065 0.044 ... 0.146 0.148 0.272 0.092 0.265 0.093 0.151 0.068 0.184 0.085 0.114 0.244 0.217 0.081 0.162 0.144 0.206 0.105 0.276 0.163 0.214 0.267 0.105 0.152 0.125 0.0 0.285 0.202 0.128 0.046 0.257 0.169 0.050 0.094 0.174 0.128 0.088 0.157 0.204 0.135

5 rows × 107 columns

In [ ]:
X_A_C2 = X_A_C2.values
cap = np.percentile(X_A_C2, 97)   
X_A_C2[X_A_C2 > cap] = cap
In [ ]:
training_size=int(X_A_C2.shape[0]*0.80)

test_size=(X_A_C2.shape[0])-training_size

train,test=X_A_C2[0:training_size],X_A_C2[training_size:(X_A_C2.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3388, 48, 107) (3388, 48, 107) 
 (740, 48, 107) (740, 48, 107)
In [ ]:
                             ###Building a sequential network:
Model_2 = models.Sequential()
Model_2.add(layers.Dense(600, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_2.add(Dropout(.01))
Model_2.add(BatchNormalization())

Model_2.add(Dense(400))
Model_2.add(Dropout(.01))
Model_2.add(BatchNormalization())

Model_2.add(Dense(200))
Model_2.add(Dropout(.01))
Model_2.add(BatchNormalization())

Model_2.add((Dense(trainX.shape[2])))
Model_2.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_2.summary()
Model: "sequential_9"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_23 (Dense)             (None, 48, 600)           64800     
_________________________________________________________________
dropout_18 (Dropout)         (None, 48, 600)           0         
_________________________________________________________________
batch_normalization_15 (Batc (None, 48, 600)           2400      
_________________________________________________________________
dense_24 (Dense)             (None, 48, 400)           240400    
_________________________________________________________________
dropout_19 (Dropout)         (None, 48, 400)           0         
_________________________________________________________________
batch_normalization_16 (Batc (None, 48, 400)           1600      
_________________________________________________________________
dense_25 (Dense)             (None, 48, 200)           80200     
_________________________________________________________________
dropout_20 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_17 (Batc (None, 48, 200)           800       
_________________________________________________________________
dense_26 (Dense)             (None, 48, 107)           21507     
=================================================================
Total params: 411,707
Trainable params: 409,307
Non-trainable params: 2,400
_________________________________________________________________
In [ ]:
model_train = Model_2.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
48/48 [==============================] - 1s 27ms/step - loss: 0.2387 - mae: 0.3472 - val_loss: 0.0651 - val_mae: 0.1654
Epoch 2/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0664 - mae: 0.1831 - val_loss: 0.0600 - val_mae: 0.1891
Epoch 3/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0508 - mae: 0.1614 - val_loss: 0.0574 - val_mae: 0.1788
Epoch 4/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0491 - mae: 0.1581 - val_loss: 0.0562 - val_mae: 0.1716
Epoch 5/30
48/48 [==============================] - 1s 19ms/step - loss: 0.0475 - mae: 0.1555 - val_loss: 0.0555 - val_mae: 0.1675
Epoch 6/30
48/48 [==============================] - 1s 19ms/step - loss: 0.0461 - mae: 0.1532 - val_loss: 0.0546 - val_mae: 0.1624
Epoch 7/30
48/48 [==============================] - 1s 19ms/step - loss: 0.0448 - mae: 0.1510 - val_loss: 0.0530 - val_mae: 0.1572
Epoch 8/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0435 - mae: 0.1487 - val_loss: 0.0525 - val_mae: 0.1565
Epoch 9/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0423 - mae: 0.1468 - val_loss: 0.0518 - val_mae: 0.1535
Epoch 10/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0411 - mae: 0.1446 - val_loss: 0.0524 - val_mae: 0.1543
Epoch 11/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0399 - mae: 0.1426 - val_loss: 0.0526 - val_mae: 0.1549
Epoch 12/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0386 - mae: 0.1405 - val_loss: 0.0523 - val_mae: 0.1536
Epoch 13/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0375 - mae: 0.1384 - val_loss: 0.0525 - val_mae: 0.1542
Epoch 14/30
48/48 [==============================] - 1s 21ms/step - loss: 0.0364 - mae: 0.1365 - val_loss: 0.0532 - val_mae: 0.1558
Epoch 15/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0352 - mae: 0.1343 - val_loss: 0.0534 - val_mae: 0.1562
Epoch 16/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0342 - mae: 0.1326 - val_loss: 0.0543 - val_mae: 0.1579
Epoch 17/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0332 - mae: 0.1308 - val_loss: 0.0546 - val_mae: 0.1583
Epoch 18/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0322 - mae: 0.1289 - val_loss: 0.0549 - val_mae: 0.1597
Epoch 19/30
48/48 [==============================] - 1s 21ms/step - loss: 0.0312 - mae: 0.1272 - val_loss: 0.0558 - val_mae: 0.1615
Epoch 20/30
48/48 [==============================] - 1s 21ms/step - loss: 0.0303 - mae: 0.1254 - val_loss: 0.0561 - val_mae: 0.1617
Epoch 21/30
48/48 [==============================] - 1s 21ms/step - loss: 0.0293 - mae: 0.1235 - val_loss: 0.0567 - val_mae: 0.1636
Epoch 22/30
48/48 [==============================] - 1s 21ms/step - loss: 0.0285 - mae: 0.1220 - val_loss: 0.0572 - val_mae: 0.1637
Epoch 23/30
48/48 [==============================] - 1s 21ms/step - loss: 0.0277 - mae: 0.1203 - val_loss: 0.0578 - val_mae: 0.1652
Epoch 24/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0269 - mae: 0.1189 - val_loss: 0.0585 - val_mae: 0.1666
Epoch 25/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0262 - mae: 0.1174 - val_loss: 0.0588 - val_mae: 0.1672
Epoch 26/30
48/48 [==============================] - 1s 19ms/step - loss: 0.0254 - mae: 0.1158 - val_loss: 0.0594 - val_mae: 0.1688
Epoch 27/30
48/48 [==============================] - 1s 19ms/step - loss: 0.0247 - mae: 0.1143 - val_loss: 0.0603 - val_mae: 0.1697
Epoch 28/30
48/48 [==============================] - 1s 19ms/step - loss: 0.0240 - mae: 0.1128 - val_loss: 0.0605 - val_mae: 0.1696
Epoch 29/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0234 - mae: 0.1114 - val_loss: 0.0613 - val_mae: 0.1716
Epoch 30/30
48/48 [==============================] - 1s 20ms/step - loss: 0.0228 - mae: 0.1101 - val_loss: 0.0623 - val_mae: 0.1725
In [ ]:
Seq_train = Model_2.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_2.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 1s 6ms/step
[[ 0.06840135  0.27151066  0.21300232 ...  0.19114114  0.23162124
   0.20910741]
 [ 0.2631586   0.2835018   0.1377318  ...  0.18022642  0.27199244
   0.74965245]
 [ 0.00845356  0.18321493  0.22318766 ...  0.34505364  0.21693715
   0.8001973 ]
 ...
 [-0.09020001  0.34118873  0.44097868 ...  0.30449355  0.33452508
   0.3491758 ]
 [-0.14235961  0.2784098   0.3019313  ...  0.31283757  0.2630142
   0.24670783]
 [ 0.0531769   0.31127834  0.45088655 ...  0.15250169  0.31813264
   0.23948622]] (3388, 48, 107)
24/24 [==============================] - 0s 5ms/step
[[ 1.6561954e-01  2.5513187e-01  3.3106619e-01 ...  3.5386032e-01
   1.9619568e-01  5.9308958e-01]
 [ 4.1135460e-01  4.0129846e-01  3.6895606e-01 ...  3.5880759e-01
   1.9577885e-01  3.0792394e-01]
 [ 2.3808944e-01  3.9521527e-01  3.3298403e-01 ...  2.4894044e-01
   2.0302053e-01  4.1104054e-01]
 ...
 [-1.9045278e-02  2.3803568e-01  3.7280807e-01 ...  1.8500906e-01
   1.1617178e-01  8.9746714e-01]
 [ 4.8533082e-04  3.6450350e-01  2.3040408e-01 ...  2.1710658e-01
   7.7074960e-02  9.5785081e-01]
 [ 1.5862936e-01  3.1371266e-01  2.9191864e-01 ...  4.2301023e-01
   1.8653348e-01  3.8961309e-01]] (740, 48, 107)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.17 RMSE
Test Score: 0.13 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(512))

#model.add(Dense(84))
#model.add(Dropout(.01))
#model.add(LSTM(50, activation='relu', return_sequences = True))
#model.add(Dropout(.01))

model.add(Dense(trainX.shape[2]))
#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_3 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_10"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_3 (LSTM)                (None, 48, 200)           246400    
_________________________________________________________________
dense_27 (Dense)             (None, 48, 107)           21507     
=================================================================
Total params: 267,907
Trainable params: 267,907
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C2 = model.fit(trainX,trainY, epochs=30, 
                         validation_split = 0.05, 
                         batch_size = 64, 
                         callbacks=[lr_decay])
51/51 [==============================] - 6s 125ms/step - loss: 0.0398 - mae: 0.1342 - val_loss: 0.0490 - val_mae: 0.1511
Epoch 6/30
51/51 [==============================] - 6s 117ms/step - loss: 0.0390 - mae: 0.1327 - val_loss: 0.0487 - val_mae: 0.1500
Epoch 7/30
51/51 [==============================] - 6s 114ms/step - loss: 0.0384 - mae: 0.1315 - val_loss: 0.0487 - val_mae: 0.1496
Epoch 8/30
51/51 [==============================] - 6s 118ms/step - loss: 0.0379 - mae: 0.1307 - val_loss: 0.0488 - val_mae: 0.1488
Epoch 9/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0375 - mae: 0.1299 - val_loss: 0.0489 - val_mae: 0.1487
Epoch 10/30
51/51 [==============================] - 6s 114ms/step - loss: 0.0372 - mae: 0.1294 - val_loss: 0.0489 - val_mae: 0.1491
Epoch 11/30
51/51 [==============================] - 6s 118ms/step - loss: 0.0369 - mae: 0.1289 - val_loss: 0.0489 - val_mae: 0.1493
Epoch 12/30
51/51 [==============================] - 6s 117ms/step - loss: 0.0367 - mae: 0.1286 - val_loss: 0.0491 - val_mae: 0.1492
Epoch 13/30
51/51 [==============================] - 6s 117ms/step - loss: 0.0366 - mae: 0.1283 - val_loss: 0.0490 - val_mae: 0.1487
Epoch 14/30
51/51 [==============================] - 6s 120ms/step - loss: 0.0364 - mae: 0.1281 - val_loss: 0.0490 - val_mae: 0.1489
Epoch 15/30
51/51 [==============================] - 6s 119ms/step - loss: 0.0364 - mae: 0.1279 - val_loss: 0.0491 - val_mae: 0.1491
Epoch 16/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0363 - mae: 0.1278 - val_loss: 0.0491 - val_mae: 0.1492
Epoch 17/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0362 - mae: 0.1277 - val_loss: 0.0491 - val_mae: 0.1492
Epoch 18/30
51/51 [==============================] - 6s 119ms/step - loss: 0.0362 - mae: 0.1276 - val_loss: 0.0492 - val_mae: 0.1491
Epoch 19/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0361 - mae: 0.1275 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 20/30
51/51 [==============================] - 6s 114ms/step - loss: 0.0361 - mae: 0.1275 - val_loss: 0.0492 - val_mae: 0.1491
Epoch 21/30
51/51 [==============================] - 6s 121ms/step - loss: 0.0360 - mae: 0.1274 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 22/30
51/51 [==============================] - 6s 115ms/step - loss: 0.0360 - mae: 0.1274 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 23/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0360 - mae: 0.1274 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 24/30
51/51 [==============================] - 6s 115ms/step - loss: 0.0360 - mae: 0.1273 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 25/30
51/51 [==============================] - 6s 112ms/step - loss: 0.0360 - mae: 0.1273 - val_loss: 0.0492 - val_mae: 0.1492
Epoch 26/30
51/51 [==============================] - 6s 114ms/step - loss: 0.0360 - mae: 0.1273 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 27/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0360 - mae: 0.1273 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 28/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0360 - mae: 0.1272 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 29/30
51/51 [==============================] - 6s 114ms/step - loss: 0.0359 - mae: 0.1272 - val_loss: 0.0492 - val_mae: 0.1493
Epoch 30/30
51/51 [==============================] - 6s 114ms/step - loss: 0.0359 - mae: 0.1272 - val_loss: 0.0492 - val_mae: 0.1493
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C2.history['loss'], label='train')
plt.plot(history_C2.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C2 = model.predict(trainX, verbose = 1)
print(testingtrain_C2[0], testingtrain_C2.shape)

testingtest_C2 = model.predict(testX, verbose = 1)
print(testingtest_C2[0], testingtest_C2.shape)
106/106 [==============================] - 1s 14ms/step
[[0.12331831 0.2062346  0.22279565 ... 0.1466456  0.16030869 0.16750936]
 [0.13920553 0.26976687 0.22592878 ... 0.21581659 0.18825345 0.3990477 ]
 [0.12261383 0.28728116 0.21812527 ... 0.27320817 0.18997335 0.50088507]
 ...
 [0.09591271 0.46504593 0.35810742 ... 0.12592548 0.41039494 0.34610665]
 [0.05003028 0.42866355 0.29982013 ... 0.1269751  0.38222292 0.30215117]
 [0.03592768 0.37438282 0.26315045 ... 0.09156284 0.30966327 0.25755554]] (3388, 48, 107)
24/24 [==============================] - 0s 13ms/step
[[0.24502683 0.45956272 0.43937767 ... 0.28163174 0.19373576 0.5061805 ]
 [0.23232964 0.45706174 0.4419532  ... 0.26235545 0.21616253 0.55990064]
 [0.24976642 0.44784167 0.435607   ... 0.2614573  0.22442624 0.5934833 ]
 ...
 [0.18295877 0.37663943 0.21086247 ... 0.2273527  0.13705792 0.747294  ]
 [0.16451496 0.37839487 0.19106373 ... 0.22026002 0.12615196 0.7803196 ]
 [0.15992944 0.3872377  0.16812071 ... 0.26116967 0.13241845 0.6645468 ]] (740, 48, 107)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY_RMSE, testingtrain_C2))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.27 RMSE
Test Score: 0.19 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C2[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, testingtest_C2[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
########cluster 3
X_A_C3 = X_A_C3.drop(columns = 'cluster')
X_A_C3 = X_A_C3.transpose()
X_A_C3.head()
Out[ ]:
17 21 26 33 35 46 56 58 62 75 82 84 89 91 94 98 104 109 131 134 139 141 147 149 152 156 157 159 166 185 191 212 228 248 253 255 256 267 294 299 300
Datetime
2013-03-01 00:00:00 0.177 1.168 0.475 0.170 0.207 0.860 0.368 1.486 0.308 0.252 0.144 0.516 0.734 0.181 0.229 0.332 2.170 0.550 0.270 0.132 0.392 0.295 0.213 0.088 0.263 0.150 0.576 0.718 0.704 0.165 0.739 0.285 0.191 1.396 0.758 1.175 0.235 0.263 0.403 0.071 0.958
2013-03-01 00:30:00 0.258 0.708 0.250 0.137 0.162 0.817 0.096 0.202 0.249 0.482 0.161 0.569 0.645 0.204 0.232 0.316 2.883 0.406 0.201 0.100 0.326 0.352 0.150 0.150 0.300 0.100 0.449 0.750 0.702 0.156 0.738 0.242 0.147 0.497 1.252 1.160 0.230 0.134 1.212 0.137 0.535
2013-03-01 01:00:00 0.271 0.678 0.263 0.181 0.176 0.833 0.100 0.181 0.172 1.018 0.134 0.535 0.354 0.200 0.258 0.232 1.780 0.381 0.320 0.145 0.294 0.260 0.200 0.150 0.325 0.163 0.312 0.871 0.201 0.154 0.738 0.378 0.143 0.502 1.131 1.160 0.229 0.064 2.269 0.194 0.284
2013-03-01 01:30:00 0.246 0.238 0.188 0.177 0.211 0.775 0.099 0.163 0.139 1.122 0.152 0.426 0.298 0.183 0.218 0.263 0.349 0.400 0.358 0.101 0.284 0.293 0.138 0.138 0.225 0.188 0.330 0.298 0.189 0.154 0.344 0.264 0.202 2.181 1.122 1.101 0.227 0.044 2.036 0.103 0.243
2013-03-01 02:00:00 0.309 0.172 0.225 0.208 0.187 0.691 0.106 0.175 0.138 0.230 0.125 0.361 0.330 0.115 0.203 0.225 0.407 0.400 0.235 0.117 0.165 0.241 0.175 0.138 0.225 0.113 0.332 0.708 0.197 0.165 0.164 0.248 0.126 0.198 1.064 1.149 0.225 0.065 0.303 0.107 0.488
In [ ]:
X_A_C3 = X_A_C3.values
cap = np.percentile(X_A_C2, 97)   
X_A_C2[X_A_C2 > cap] = cap
In [ ]:
training_size=int(X_A_C3.shape[0]*0.80)

test_size=(X_A_C3.shape[0])-training_size

train,test=X_A_C3[0:training_size],X_A_C3[training_size:(X_A_C3.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)


print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3388, 48, 41) (3388, 48, 41) 
 (740, 48, 41) (740, 48, 41)
In [ ]:
                        ###Building a sequential network:
Model_3 = models.Sequential()
Model_3.add(layers.Dense(200, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_3.add(Dropout(.2))
Model_3.add(BatchNormalization())


Model_3.add(Dense(100))
Model_3.add(Dropout(.2))
Model_3.add(BatchNormalization())


Model_3.add((Dense(trainX.shape[2])))
Model_3.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_3.summary()
Model: "sequential_14"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_38 (Dense)             (None, 48, 200)           8400      
_________________________________________________________________
dropout_28 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_25 (Batc (None, 48, 200)           800       
_________________________________________________________________
dense_39 (Dense)             (None, 48, 100)           20100     
_________________________________________________________________
dropout_29 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
batch_normalization_26 (Batc (None, 48, 100)           400       
_________________________________________________________________
dense_40 (Dense)             (None, 48, 41)            4141      
=================================================================
Total params: 33,841
Trainable params: 33,241
Non-trainable params: 600
_________________________________________________________________
In [ ]:
model_train = Model_3.fit(trainX,trainY, epochs=30, validation_split = 0.05, batch_size=64)
Epoch 1/30
51/51 [==============================] - 1s 14ms/step - loss: 0.8475 - mae: 0.6994 - val_loss: 0.2274 - val_mae: 0.3313
Epoch 2/30
51/51 [==============================] - 1s 10ms/step - loss: 0.3723 - mae: 0.4568 - val_loss: 0.1316 - val_mae: 0.2490
Epoch 3/30
51/51 [==============================] - 0s 9ms/step - loss: 0.2394 - mae: 0.3568 - val_loss: 0.1186 - val_mae: 0.2545
Epoch 4/30
51/51 [==============================] - 0s 10ms/step - loss: 0.1922 - mae: 0.3153 - val_loss: 0.1152 - val_mae: 0.2482
Epoch 5/30
51/51 [==============================] - 0s 10ms/step - loss: 0.1648 - mae: 0.2879 - val_loss: 0.1115 - val_mae: 0.2417
Epoch 6/30
51/51 [==============================] - 0s 10ms/step - loss: 0.1468 - mae: 0.2679 - val_loss: 0.1091 - val_mae: 0.2365
Epoch 7/30
51/51 [==============================] - 0s 9ms/step - loss: 0.1353 - mae: 0.2542 - val_loss: 0.1062 - val_mae: 0.2312
Epoch 8/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1275 - mae: 0.2444 - val_loss: 0.1043 - val_mae: 0.2276
Epoch 9/30
51/51 [==============================] - 0s 10ms/step - loss: 0.1219 - mae: 0.2372 - val_loss: 0.1026 - val_mae: 0.2239
Epoch 10/30
51/51 [==============================] - 0s 9ms/step - loss: 0.1181 - mae: 0.2323 - val_loss: 0.1020 - val_mae: 0.2221
Epoch 11/30
51/51 [==============================] - 0s 9ms/step - loss: 0.1153 - mae: 0.2284 - val_loss: 0.1011 - val_mae: 0.2201
Epoch 12/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1133 - mae: 0.2261 - val_loss: 0.1005 - val_mae: 0.2188
Epoch 13/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1113 - mae: 0.2238 - val_loss: 0.0996 - val_mae: 0.2170
Epoch 14/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1098 - mae: 0.2220 - val_loss: 0.0998 - val_mae: 0.2173
Epoch 15/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1086 - mae: 0.2206 - val_loss: 0.0993 - val_mae: 0.2154
Epoch 16/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1075 - mae: 0.2195 - val_loss: 0.1002 - val_mae: 0.2172
Epoch 17/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1064 - mae: 0.2184 - val_loss: 0.0992 - val_mae: 0.2148
Epoch 18/30
51/51 [==============================] - 1s 11ms/step - loss: 0.1053 - mae: 0.2173 - val_loss: 0.0993 - val_mae: 0.2154
Epoch 19/30
51/51 [==============================] - 1s 11ms/step - loss: 0.1044 - mae: 0.2164 - val_loss: 0.0990 - val_mae: 0.2145
Epoch 20/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1037 - mae: 0.2158 - val_loss: 0.0993 - val_mae: 0.2147
Epoch 21/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1028 - mae: 0.2149 - val_loss: 0.0986 - val_mae: 0.2140
Epoch 22/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1021 - mae: 0.2143 - val_loss: 0.0992 - val_mae: 0.2136
Epoch 23/30
51/51 [==============================] - 1s 11ms/step - loss: 0.1014 - mae: 0.2136 - val_loss: 0.0993 - val_mae: 0.2145
Epoch 24/30
51/51 [==============================] - 1s 10ms/step - loss: 0.1007 - mae: 0.2130 - val_loss: 0.0996 - val_mae: 0.2148
Epoch 25/30
51/51 [==============================] - 1s 11ms/step - loss: 0.1001 - mae: 0.2126 - val_loss: 0.0995 - val_mae: 0.2150
Epoch 26/30
51/51 [==============================] - 1s 10ms/step - loss: 0.0994 - mae: 0.2119 - val_loss: 0.1004 - val_mae: 0.2164
Epoch 27/30
51/51 [==============================] - 1s 10ms/step - loss: 0.0988 - mae: 0.2114 - val_loss: 0.1003 - val_mae: 0.2163
Epoch 28/30
51/51 [==============================] - 1s 11ms/step - loss: 0.0983 - mae: 0.2110 - val_loss: 0.1002 - val_mae: 0.2159
Epoch 29/30
51/51 [==============================] - 1s 11ms/step - loss: 0.0978 - mae: 0.2103 - val_loss: 0.0997 - val_mae: 0.2151
Epoch 30/30
51/51 [==============================] - 0s 10ms/step - loss: 0.0974 - mae: 0.2101 - val_loss: 0.0995 - val_mae: 0.2147
In [ ]:
Seq_train = Model_3.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_3.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 0s 3ms/step
[[0.21381682 0.77037627 0.3722371  ... 0.53746766 0.1940981  0.86822844]
 [0.43124452 0.73405313 0.28179485 ... 1.0903006  0.10453896 0.7316409 ]
 [0.33096737 0.5316932  0.23229863 ... 1.4447125  0.10507081 0.37827647]
 ...
 [0.2919823  0.5507933  0.32869393 ... 0.6066104  0.31075722 0.4449557 ]
 [0.34525776 0.58457285 0.2392452  ... 0.49297974 0.3310593  0.5074482 ]
 [0.3345419  0.65518576 0.34377986 ... 0.49731445 0.26814944 0.6939844 ]] (3388, 48, 41)
24/24 [==============================] - 0s 3ms/step
[[ 0.2983591   0.34822035  0.40821597 ...  0.56143177  0.22058025
   0.5282631 ]
 [ 0.27505267  0.24525866  0.43346012 ...  0.4570888   0.15920717
   0.42072815]
 [ 0.17174944  0.36422935  0.42069125 ...  0.37820917 -0.0123011
   0.48119825]
 ...
 [ 0.05520239  0.43358788  0.5579003  ...  0.43631878  0.09162515
   0.3998679 ]
 [ 0.20967634  0.38433254  0.41281027 ...  0.4818303   0.13457482
   0.2204958 ]
 [ 0.24440953  0.41253734  0.29473191 ...  0.6070328   0.25061435
   0.6339617 ]] (740, 48, 41)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.47 RMSE
Test Score: 0.32 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(100, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
model.add(Dropout(.2))

#model.add(LSTM(100, activation='relu', return_sequences=True))
#model.add(Dropout(.1))

model.add(Dense(trainX.shape[2]))
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_8 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_17"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_8 (LSTM)                (None, 48, 100)           56800     
_________________________________________________________________
dropout_31 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
dense_43 (Dense)             (None, 48, 41)            4141      
=================================================================
Total params: 60,941
Trainable params: 60,941
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C3 = model.fit(trainX,trainY, epochs=30, 
                         validation_split = 0.05, 
                         batch_size = 64, 
                         callbacks=[lr_decay])
Epoch 1/30
51/51 [==============================] - 6s 118ms/step - loss: 0.2370 - mae: 0.3317 - val_loss: 0.1408 - val_mae: 0.2678
Epoch 2/30
51/51 [==============================] - 6s 112ms/step - loss: 0.1578 - mae: 0.2708 - val_loss: 0.1273 - val_mae: 0.2510
Epoch 3/30
51/51 [==============================] - 6s 115ms/step - loss: 0.1424 - mae: 0.2562 - val_loss: 0.1199 - val_mae: 0.2411
Epoch 4/30
51/51 [==============================] - 6s 114ms/step - loss: 0.1342 - mae: 0.2481 - val_loss: 0.1159 - val_mae: 0.2351
Epoch 5/30
51/51 [==============================] - 6s 114ms/step - loss: 0.1292 - mae: 0.2427 - val_loss: 0.1136 - val_mae: 0.2340
Epoch 6/30
51/51 [==============================] - 6s 115ms/step - loss: 0.1259 - mae: 0.2394 - val_loss: 0.1123 - val_mae: 0.2319
Epoch 7/30
51/51 [==============================] - 6s 113ms/step - loss: 0.1235 - mae: 0.2369 - val_loss: 0.1112 - val_mae: 0.2304
Epoch 8/30
51/51 [==============================] - 6s 113ms/step - loss: 0.1218 - mae: 0.2350 - val_loss: 0.1107 - val_mae: 0.2293
Epoch 9/30
51/51 [==============================] - 6s 117ms/step - loss: 0.1205 - mae: 0.2335 - val_loss: 0.1102 - val_mae: 0.2283
Epoch 10/30
51/51 [==============================] - 6s 116ms/step - loss: 0.1195 - mae: 0.2327 - val_loss: 0.1099 - val_mae: 0.2281
Epoch 11/30
51/51 [==============================] - 6s 113ms/step - loss: 0.1187 - mae: 0.2317 - val_loss: 0.1094 - val_mae: 0.2274
Epoch 12/30
51/51 [==============================] - 6s 116ms/step - loss: 0.1181 - mae: 0.2312 - val_loss: 0.1091 - val_mae: 0.2269
Epoch 13/30
51/51 [==============================] - 6s 115ms/step - loss: 0.1176 - mae: 0.2306 - val_loss: 0.1087 - val_mae: 0.2269
Epoch 14/30
51/51 [==============================] - 6s 117ms/step - loss: 0.1171 - mae: 0.2302 - val_loss: 0.1088 - val_mae: 0.2263
Epoch 15/30
51/51 [==============================] - 6s 113ms/step - loss: 0.1168 - mae: 0.2297 - val_loss: 0.1087 - val_mae: 0.2265
Epoch 16/30
51/51 [==============================] - 6s 118ms/step - loss: 0.1165 - mae: 0.2295 - val_loss: 0.1085 - val_mae: 0.2260
Epoch 17/30
51/51 [==============================] - 6s 114ms/step - loss: 0.1163 - mae: 0.2294 - val_loss: 0.1085 - val_mae: 0.2260
Epoch 18/30
51/51 [==============================] - 6s 114ms/step - loss: 0.1161 - mae: 0.2291 - val_loss: 0.1084 - val_mae: 0.2259
Epoch 19/30
51/51 [==============================] - 6s 111ms/step - loss: 0.1160 - mae: 0.2290 - val_loss: 0.1083 - val_mae: 0.2257
Epoch 20/30
51/51 [==============================] - 6s 115ms/step - loss: 0.1160 - mae: 0.2288 - val_loss: 0.1083 - val_mae: 0.2257
Epoch 21/30
51/51 [==============================] - 6s 115ms/step - loss: 0.1158 - mae: 0.2288 - val_loss: 0.1083 - val_mae: 0.2257
Epoch 22/30
51/51 [==============================] - 6s 117ms/step - loss: 0.1158 - mae: 0.2287 - val_loss: 0.1082 - val_mae: 0.2256
Epoch 23/30
51/51 [==============================] - 6s 111ms/step - loss: 0.1157 - mae: 0.2286 - val_loss: 0.1082 - val_mae: 0.2256
Epoch 24/30
51/51 [==============================] - 6s 116ms/step - loss: 0.1157 - mae: 0.2287 - val_loss: 0.1082 - val_mae: 0.2255
Epoch 25/30
51/51 [==============================] - 6s 115ms/step - loss: 0.1156 - mae: 0.2285 - val_loss: 0.1082 - val_mae: 0.2255
Epoch 26/30
51/51 [==============================] - 6s 113ms/step - loss: 0.1156 - mae: 0.2285 - val_loss: 0.1082 - val_mae: 0.2255
Epoch 27/30
51/51 [==============================] - 6s 114ms/step - loss: 0.1155 - mae: 0.2284 - val_loss: 0.1081 - val_mae: 0.2255
Epoch 28/30
51/51 [==============================] - 6s 111ms/step - loss: 0.1155 - mae: 0.2284 - val_loss: 0.1082 - val_mae: 0.2254
Epoch 29/30
51/51 [==============================] - 6s 115ms/step - loss: 0.1156 - mae: 0.2285 - val_loss: 0.1081 - val_mae: 0.2254
Epoch 30/30
51/51 [==============================] - 6s 114ms/step - loss: 0.1155 - mae: 0.2284 - val_loss: 0.1081 - val_mae: 0.2254
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C3.history['loss'], label='train')
plt.plot(history_C3.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C3 = model.predict(trainX, verbose = 1)
print(testingtrain_C3[0], testingtrain_C3.shape)

testingtest_C3 = model.predict(testX, verbose = 1)
print(testingtest_C3[0], testingtest_C3.shape)
106/106 [==============================] - 1s 11ms/step
[[0.45927545 0.48121288 0.24777561 ... 0.52645093 0.22701663 0.32558358]
 [0.47118452 0.5373083  0.30591816 ... 0.80852926 0.20853037 0.3308726 ]
 [0.3885165  0.4683528  0.31574866 ... 1.0537794  0.16787767 0.2521349 ]
 ...
 [0.39053735 0.7827523  0.35433933 ... 0.73631996 0.37380216 0.73480827]
 [0.3174399  0.79957515 0.29514894 ... 0.5659469  0.35352352 0.7747612 ]
 [0.30176112 0.7016568  0.26426828 ... 0.43799612 0.25625873 0.7659684 ]] (3388, 48, 41)
24/24 [==============================] - 0s 11ms/step
[[0.45309937 0.34327948 0.21979253 ... 0.46926674 0.37539014 0.42624766]
 [0.53736556 0.41213334 0.25571474 ... 0.51805246 0.37524682 0.5141171 ]
 [0.57303244 0.46513253 0.28923085 ... 0.52647895 0.3128239  0.53959787]
 ...
 [0.09989788 0.47398788 0.5975855  ... 0.46152043 0.3792242  0.54448617]
 [0.10384598 0.41656023 0.5607642  ... 0.40375203 0.30866793 0.4592802 ]
 [0.13034298 0.45645803 0.5550024  ... 0.45993876 0.30030298 0.48524088]] (740, 48, 41)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


testMAE = np.mean(mae(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.49 RMSE
Test Score: 0.33 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C3[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C3[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
########cluster 4
X_A_C4 = X_A_C4.drop(columns = 'cluster')
X_A_C4 = X_A_C4.transpose()
X_A_C4.head()
Out[ ]:
1 52 68 81 85 118 121 130 145 175 192 206 246 251 269 281 285 289
Datetime
2013-03-01 00:00:00 0.110 0.058 0.979 0.236 0.061 0.188 1.725 0.666 0.785 0.081 0.240 0.415 0.616 0.245 0.613 0.299 1.238 2.921
2013-03-01 00:30:00 0.213 0.039 0.852 0.176 0.089 0.202 1.713 0.667 0.782 0.073 0.209 0.385 0.583 0.206 0.626 0.597 1.099 1.630
2013-03-01 01:00:00 0.109 0.056 0.918 0.292 0.084 0.196 0.363 0.652 0.840 0.534 0.185 0.428 0.603 0.194 0.700 0.662 0.747 0.744
2013-03-01 01:30:00 0.180 0.042 0.893 1.173 0.059 0.180 0.025 0.693 0.821 0.774 0.188 0.390 0.600 0.235 0.639 0.625 0.116 2.269
2013-03-01 02:00:00 0.141 0.053 0.879 1.131 0.050 0.163 0.050 0.663 0.818 0.781 0.167 0.417 0.596 1.955 0.625 0.615 0.117 2.226
In [ ]:
X_A_C4 = X_A_C4.values
cap = np.percentile(X_A_C4, 97)   
X_A_C4[X_A_C4 > cap] = cap
In [ ]:
training_size=int(X_A_C4.shape[0]*0.80)

test_size=(X_A_C4.shape[0])-training_size

train,test=X_A_C4[0:training_size],X_A_C4[training_size:(X_A_C4.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3388, 48, 18) (3388, 48, 18) 
 (740, 48, 18) (740, 48, 18)
In [ ]:
                             ###Building a sequential network:
Model_4 = models.Sequential()
Model_4.add(layers.Dense(150, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_4.add(Dropout(0.5))
Model_4.add(BatchNormalization())

Model_4.add(layers.Dense(75, activation='relu'))
Model_4.add(Dropout(0.5))
Model_4.add(BatchNormalization())

Model_4.add((Dense(trainX.shape[2])))
Model_4.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_4.summary()
Model: "sequential_18"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_44 (Dense)             (None, 48, 150)           2850      
_________________________________________________________________
dropout_32 (Dropout)         (None, 48, 150)           0         
_________________________________________________________________
batch_normalization_27 (Batc (None, 48, 150)           600       
_________________________________________________________________
dense_45 (Dense)             (None, 48, 75)            11325     
_________________________________________________________________
dropout_33 (Dropout)         (None, 48, 75)            0         
_________________________________________________________________
batch_normalization_28 (Batc (None, 48, 75)            300       
_________________________________________________________________
dense_46 (Dense)             (None, 48, 18)            1368      
=================================================================
Total params: 16,443
Trainable params: 15,993
Non-trainable params: 450
_________________________________________________________________
In [ ]:
model_train = Model_4.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
48/48 [==============================] - 1s 13ms/step - loss: 1.1918 - mae: 0.8149 - val_loss: 0.1165 - val_mae: 0.2320
Epoch 2/30
48/48 [==============================] - 0s 9ms/step - loss: 0.5648 - mae: 0.5479 - val_loss: 0.1008 - val_mae: 0.2202
Epoch 3/30
48/48 [==============================] - 0s 9ms/step - loss: 0.3263 - mae: 0.4025 - val_loss: 0.0868 - val_mae: 0.2033
Epoch 4/30
48/48 [==============================] - 0s 9ms/step - loss: 0.2070 - mae: 0.3135 - val_loss: 0.0795 - val_mae: 0.1944
Epoch 5/30
48/48 [==============================] - 0s 9ms/step - loss: 0.1420 - mae: 0.2580 - val_loss: 0.0743 - val_mae: 0.1882
Epoch 6/30
48/48 [==============================] - 0s 9ms/step - loss: 0.1046 - mae: 0.2214 - val_loss: 0.0705 - val_mae: 0.1834
Epoch 7/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0826 - mae: 0.1972 - val_loss: 0.0676 - val_mae: 0.1801
Epoch 8/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0701 - mae: 0.1817 - val_loss: 0.0653 - val_mae: 0.1778
Epoch 9/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0625 - mae: 0.1714 - val_loss: 0.0635 - val_mae: 0.1757
Epoch 10/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0582 - mae: 0.1652 - val_loss: 0.0620 - val_mae: 0.1735
Epoch 11/30
48/48 [==============================] - 0s 10ms/step - loss: 0.0557 - mae: 0.1613 - val_loss: 0.0611 - val_mae: 0.1722
Epoch 12/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0542 - mae: 0.1590 - val_loss: 0.0603 - val_mae: 0.1710
Epoch 13/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0531 - mae: 0.1572 - val_loss: 0.0598 - val_mae: 0.1703
Epoch 14/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0525 - mae: 0.1562 - val_loss: 0.0593 - val_mae: 0.1699
Epoch 15/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0519 - mae: 0.1553 - val_loss: 0.0590 - val_mae: 0.1688
Epoch 16/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0517 - mae: 0.1550 - val_loss: 0.0585 - val_mae: 0.1683
Epoch 17/30
48/48 [==============================] - 0s 8ms/step - loss: 0.0514 - mae: 0.1545 - val_loss: 0.0583 - val_mae: 0.1680
Epoch 18/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0510 - mae: 0.1540 - val_loss: 0.0580 - val_mae: 0.1676
Epoch 19/30
48/48 [==============================] - 0s 8ms/step - loss: 0.0508 - mae: 0.1536 - val_loss: 0.0578 - val_mae: 0.1672
Epoch 20/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0506 - mae: 0.1533 - val_loss: 0.0577 - val_mae: 0.1669
Epoch 21/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0504 - mae: 0.1529 - val_loss: 0.0574 - val_mae: 0.1663
Epoch 22/30
48/48 [==============================] - 0s 8ms/step - loss: 0.0502 - mae: 0.1527 - val_loss: 0.0574 - val_mae: 0.1668
Epoch 23/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0501 - mae: 0.1525 - val_loss: 0.0572 - val_mae: 0.1663
Epoch 24/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0499 - mae: 0.1521 - val_loss: 0.0568 - val_mae: 0.1660
Epoch 25/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0498 - mae: 0.1520 - val_loss: 0.0566 - val_mae: 0.1649
Epoch 26/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0496 - mae: 0.1516 - val_loss: 0.0566 - val_mae: 0.1647
Epoch 27/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0495 - mae: 0.1514 - val_loss: 0.0566 - val_mae: 0.1643
Epoch 28/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0494 - mae: 0.1513 - val_loss: 0.0564 - val_mae: 0.1641
Epoch 29/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0492 - mae: 0.1509 - val_loss: 0.0562 - val_mae: 0.1644
Epoch 30/30
48/48 [==============================] - 0s 9ms/step - loss: 0.0491 - mae: 0.1507 - val_loss: 0.0558 - val_mae: 0.1637
In [ ]:
Seq_train = Model_4.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_4.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 0s 3ms/step
[[0.1460951  0.5207249  0.71921813 0.07806134 0.22988477 0.38310006
  0.6234546  0.54442453 0.70042455 0.2858047  0.33388633 0.38760537
  0.4967562  0.26709566 0.4959867  0.4928999  0.43949658 0.842509  ]
 [0.1334602  0.571141   0.6828159  0.06872566 0.26418325 0.4102878
  0.67490244 0.5368628  0.70982337 0.29010683 0.3543598  0.40876842
  0.53899086 0.24281125 0.53546023 0.52541417 0.48301223 0.8424021 ]
 [0.13748956 0.21643792 0.75574976 0.21251199 0.24772616 0.35859102
  0.29442343 0.4678524  0.7321475  0.5468104  0.31026733 0.41120344
  0.5245807  0.3687296  0.52709895 0.56782997 0.38760713 0.74401367]
 [0.23915204 0.05801775 0.924471   0.9677588  0.16643262 0.21886633
  0.05358571 0.5528159  0.7961257  0.77503455 0.20418239 0.47239995
  0.4429097  0.55106884 0.41140264 0.65587366 0.20469062 0.8482503 ]
 [0.2772015  0.03470367 0.9188014  1.0365146  0.17996547 0.2402418
  0.03812617 0.54951954 0.7938448  0.8159995  0.22422034 0.49406794
  0.5611787  0.9416093  0.5366922  0.68143725 0.22178628 0.8355751 ]
 [0.3157522  0.04388835 0.8182322  0.9345529  0.22491568 0.2644894
  0.03663833 0.46658707 0.75129795 0.75865793 0.23536116 0.479441
  0.57067835 0.94737184 0.5460913  0.642051   0.25613853 0.7510531 ]
 [0.512254   0.01899248 0.8220664  1.056735   0.20211786 0.25533196
  0.0267978  0.4948935  0.7926701  0.75901544 0.21335292 0.46532422
  0.5389241  0.93767196 0.5556208  0.62887496 0.2360105  0.7535138 ]
 [0.3932822  0.02238849 0.8005215  1.0156636  0.1844945  0.22752538
  0.04130074 0.47136292 0.7488475  0.7190718  0.18372911 0.45173448
  0.40948755 0.58703846 0.36611193 0.59432834 0.19914907 0.7642163 ]
 [0.24366918 0.05964802 0.7711383  0.9183973  0.17912552 0.21596928
  0.05380383 0.4749028  0.7076237  0.6655844  0.17365776 0.438681
  0.3268407  0.37747848 0.22413826 0.55946636 0.1871832  0.7670963 ]
 [0.25802103 0.04375419 0.6169081  0.443819   0.20313887 0.27209857
  0.10556353 0.23586716 0.5920212  0.502741   0.18666176 0.37245423
  0.2534577  0.27637678 0.18393148 0.40432262 0.19232312 0.62638193]
 [0.8433441  0.03881276 0.80881596 0.24333324 0.19432357 0.2288328
  0.10560673 0.15696353 0.71395934 0.20845203 0.15997608 0.3508623
  0.13764961 0.24460943 0.12037887 0.2161988  0.13850057 0.75396496]
 [0.80919313 0.05589539 0.71309465 0.14461245 0.2031375  0.23864752
  0.1053316  0.16943645 0.65481055 0.14950043 0.16853593 0.3080255
  0.13042541 0.22079128 0.11160611 0.17229682 0.15595266 0.67363226]
 [0.20530087 0.08345677 0.24719763 0.09757061 0.21650113 0.3013659
  0.15124492 0.19312589 0.2674252  0.2430471  0.19479941 0.19670743
  0.21622923 0.19547522 0.11731641 0.22424278 0.21665709 0.31261274]
 [0.19498673 0.08470041 0.22682902 0.10395011 0.21675166 0.30434108
  0.1548738  0.19020909 0.25496346 0.24781032 0.19417022 0.19360994
  0.21487041 0.18737566 0.11721317 0.22652902 0.21574348 0.30099633]
 [0.20000002 0.08085521 0.23690943 0.10007718 0.21010971 0.29776797
  0.15128627 0.19085458 0.24751052 0.2384179  0.19066052 0.19033891
  0.21193454 0.19278787 0.11337911 0.22119078 0.2066526  0.30096114]
 [0.18659967 0.06350364 0.23548564 0.11631819 0.20420669 0.32382178
  0.1469551  0.2101559  0.3292423  0.38118762 0.1854371  0.215595
  0.21392374 0.18913248 0.10178565 0.26323166 0.1860293  0.31419957]
 [0.18620464 0.06501049 0.32654545 0.16805464 0.21287121 0.31972662
  0.14714777 0.22921799 0.4015824  0.41671127 0.1972337  0.25543547
  0.24789102 0.2154946  0.1480672  0.3072193  0.2120849  0.38629222]
 [0.20968735 0.08182231 0.2490339  0.09362782 0.22648238 0.30799735
  0.15427008 0.20151289 0.28845567 0.2567855  0.19670103 0.20549518
  0.23007417 0.19947559 0.11341014 0.22694348 0.22766668 0.31603727]
 [0.20277843 0.12913224 0.23984179 0.17216384 0.30881912 0.34025717
  0.17348336 0.2282874  0.41482818 0.32791293 0.23689888 0.27490842
  0.31727672 0.22156674 0.18922494 0.31154972 0.33412725 0.36891973]
 [0.18959466 0.07579127 0.26193598 0.17097123 0.25198868 0.33282363
  0.1566828  0.21019238 0.3919379  0.3887027  0.2095489  0.2585372
  0.26892695 0.21342269 0.15201783 0.2996898  0.25777864 0.3604568 ]
 [0.19363812 0.08514053 0.22999632 0.11731108 0.2355693  0.321461
  0.14969096 0.20944816 0.32677156 0.3039942  0.20621969 0.22070411
  0.23485291 0.19003767 0.12560588 0.25074705 0.24298179 0.322731  ]
 [0.2308768  0.12716353 0.25163248 0.18345748 0.317591   0.32581273
  0.14339218 0.27538288 0.43919247 0.29602978 0.24225347 0.27141735
  0.314581   0.22400542 0.1885061  0.30097166 0.34958696 0.37341675]
 [0.22258197 0.12682968 0.22773796 0.15070593 0.29286516 0.3319737
  0.1459777  0.23567028 0.3955691  0.28356287 0.22969297 0.2453338
  0.27833554 0.18540104 0.17190018 0.28055203 0.29937905 0.34008035]
 [0.20075195 0.10077425 0.20581111 0.12967855 0.25528964 0.31843567
  0.14851458 0.21961594 0.2990423  0.25170106 0.21281086 0.20469028
  0.24723499 0.18355566 0.14576776 0.24680579 0.26260212 0.3004459 ]
 [0.22181015 0.10087217 0.20933338 0.12684155 0.26055303 0.31831828
  0.15747517 0.20638302 0.30971345 0.24461527 0.21166193 0.21744551
  0.24590184 0.18309304 0.13752125 0.24233718 0.26585853 0.31504238]
 [0.22755419 0.08404562 0.2544329  0.10797542 0.24057943 0.30898565
  0.13932317 0.21051581 0.32660446 0.25763983 0.20720352 0.21539326
  0.23176682 0.19334073 0.13138263 0.23475164 0.24965458 0.32846412]
 [0.19545157 0.1084445  0.2098662  0.13704622 0.26938248 0.31747878
  0.16485065 0.21297458 0.30678287 0.2551245  0.21600722 0.22819115
  0.26274824 0.20112765 0.13473456 0.2539269  0.28192723 0.32411343]
 [0.20034711 0.10972589 0.20586905 0.13885626 0.2701303  0.31809792
  0.16929515 0.21559732 0.3026315  0.24911068 0.21747878 0.22586739
  0.26700404 0.20576838 0.14547668 0.25475466 0.28608942 0.3205705 ]
 [0.19875659 0.09646466 0.21797359 0.10792832 0.24365287 0.3116982
  0.16085473 0.2019658  0.28063893 0.24552724 0.20619676 0.20923501
  0.23898304 0.19159816 0.12536608 0.23700231 0.2507787  0.3118294 ]
 [0.18540753 0.06802244 0.20605958 0.11400397 0.23323    0.3375771
  0.15386137 0.19618481 0.32956058 0.37162584 0.1977994  0.2263088
  0.2425385  0.2031493  0.11969903 0.2665575  0.22058311 0.30397514]
 [0.25079292 0.08923688 0.243671   0.09999125 0.2381872  0.30542678
  0.15769303 0.19707134 0.29033688 0.22235829 0.2020145  0.20294274
  0.2268486  0.18829992 0.12843835 0.21957116 0.23928903 0.3186385 ]
 [0.20427671 0.11464997 0.19549534 0.12450762 0.27882922 0.32847095
  0.18293183 0.2136693  0.31960166 0.25545973 0.22404923 0.23369056
  0.27579525 0.19524035 0.15168522 0.25703785 0.2971951  0.3242154 ]
 [0.20328422 0.08471541 0.22943139 0.10257059 0.24300419 0.31537768
  0.15401688 0.20478581 0.29625604 0.2607414  0.20469674 0.21310261
  0.24381316 0.19935846 0.12151317 0.23529956 0.2482283  0.31350926]
 [0.22186144 0.07304311 0.28352845 0.14062579 0.22665356 0.32715532
  0.16756716 0.2131622  0.3798355  0.4019214  0.19309779 0.25221974
  0.25701138 0.21872453 0.1285891  0.29163146 0.21085206 0.3568049 ]
 [0.20250537 0.08356956 0.24762131 0.11997102 0.23743193 0.31312293
  0.14900835 0.21984617 0.3231456  0.28731495 0.20464033 0.2216427
  0.24350087 0.20293579 0.12507227 0.24686074 0.24625972 0.32959718]
 [0.19292444 0.12290112 0.20659608 0.15418537 0.28761086 0.32616812
  0.16437547 0.23649481 0.34058014 0.27100515 0.22698772 0.23838325
  0.2835687  0.20626618 0.15152113 0.27133656 0.30917323 0.33378354]
 [0.35174507 0.13406244 0.3414828  0.3164699  0.30136335 0.31425396
  0.1383351  0.4280749  0.5371796  0.31739825 0.24798639 0.27026266
  0.34826475 0.29439983 0.24915907 0.33944514 0.375323   0.43955627]
 [0.2035762  0.13883981 0.24195185 0.24232353 0.32235757 0.32662648
  0.18150647 0.2520603  0.41488054 0.3081687  0.23810461 0.30068055
  0.32295516 0.2309219  0.16479117 0.3166064  0.35122043 0.4014677 ]
 [0.20511721 0.1617116  0.24244109 0.18678112 0.34116796 0.34277815
  0.19728218 0.24893056 0.43001032 0.2932867  0.2532692  0.30730978
  0.34788692 0.2248453  0.19186993 0.3200937  0.3814237  0.405567  ]
 [0.18706176 0.08904293 0.23471272 0.16043404 0.27321377 0.33795062
  0.17052354 0.22217228 0.39067152 0.37269032 0.21661599 0.27020115
  0.28441003 0.2144577  0.14414203 0.29558378 0.27941266 0.35616943]
 [0.1862351  0.08355962 0.22826827 0.15355545 0.25670174 0.33028045
  0.15526831 0.22921133 0.37500995 0.36443532 0.21055639 0.25216877
  0.2647287  0.20373255 0.13371152 0.28369406 0.2636933  0.342337  ]
 [0.19196402 0.11471342 0.23260874 0.1737648  0.27844536 0.32107183
  0.16815355 0.23582353 0.34800214 0.28283748 0.22428362 0.2532884
  0.28549203 0.2202524  0.15224591 0.27955607 0.30244893 0.35443816]
 [0.19306314 0.11016362 0.2368063  0.16731504 0.27111328 0.31775144
  0.16406856 0.23350354 0.3363578  0.27752864 0.22137415 0.24593036
  0.28094557 0.22373767 0.15344043 0.27521136 0.29265946 0.34881324]
 [0.1878234  0.1301108  0.21003008 0.14997733 0.28570175 0.3247077
  0.18355331 0.21846043 0.3223826  0.2567517  0.22530901 0.25085616
  0.286942   0.20824687 0.14951327 0.2713616  0.30768323 0.3439215 ]
 [0.23166035 0.11153349 0.31416193 0.19823694 0.30925927 0.3355059
  0.16132854 0.2661692  0.46785188 0.3494706  0.25006142 0.30505303
  0.35710645 0.29769915 0.25243723 0.34199396 0.35501748 0.42076394]
 [0.42942965 0.14187446 0.3999539  0.17497072 0.30807823 0.33815733
  0.13623074 0.3850279  0.60309446 0.29929432 0.27507252 0.26153743
  0.36050504 0.32480967 0.3718983  0.3323828  0.39267868 0.44149202]
 [0.4117263  0.49631232 0.35476393 0.09749292 0.3743754  0.40866685
  0.28556937 0.42302203 0.66978    0.21993876 0.32191914 0.3088922
  0.4812888  0.26695848 0.51980317 0.411875   0.48037764 0.4529867 ]
 [0.15844834 0.7704018  0.44526023 0.09103329 0.3356843  0.44419092
  0.7597802  0.46444237 0.6516123  0.18946631 0.35021093 0.42086342
  0.567142   0.22242954 0.52602595 0.51808196 0.5065832  0.740782  ]] (3388, 48, 18)
24/24 [==============================] - 0s 3ms/step
[[0.18742305 0.09081501 0.20273471 0.10577083 0.23373476 0.31535706
  0.16030294 0.19544275 0.26204264 0.25287953 0.20309064 0.19942398
  0.23071763 0.18332592 0.12479181 0.23469901 0.23919033 0.2963647 ]
 [0.19176319 0.08530433 0.21274765 0.10412414 0.22568882 0.31341648
  0.15476364 0.19363584 0.2702178  0.26357108 0.19874296 0.19931257
  0.22167043 0.18423131 0.11782808 0.23252545 0.22667783 0.2987273 ]
 [0.19041197 0.08723758 0.20985554 0.1049235  0.22825092 0.3135676
  0.15660089 0.19410756 0.26694456 0.25892374 0.20005964 0.19927423
  0.22445099 0.18352555 0.12012047 0.23303543 0.2306238  0.29811132]
 [0.1938528  0.11857694 0.18011218 0.11776359 0.2850471  0.33076274
  0.18373124 0.20251298 0.2991987  0.25589746 0.2222954  0.23313692
  0.2774375  0.19893758 0.14139774 0.25866222 0.2949235  0.3168177 ]
 [0.19745743 0.14650458 0.1775462  0.11590241 0.27899373 0.33361474
  0.19679385 0.21750307 0.2981869  0.22381577 0.22630353 0.21655455
  0.27400813 0.174968   0.16615786 0.25619075 0.29302347 0.30896688]
 [0.29928967 0.18367386 0.28109327 0.36091805 0.3513799  0.3313715
  0.15403584 0.3368142  0.51035976 0.31440008 0.25508595 0.30587405
  0.37487164 0.33624157 0.2529339  0.35733157 0.39933056 0.42019922]
 [0.19132012 0.19240843 0.21979791 0.21969534 0.3320266  0.33018506
  0.21903467 0.31906545 0.41570622 0.26721403 0.2547724  0.28649065
  0.35202712 0.26576328 0.18487883 0.31878328 0.3909594  0.4026287 ]
 [0.18882206 0.36919346 0.19452184 0.1975881  0.4027775  0.38432503
  0.38261074 0.3073023  0.47520402 0.23892376 0.29719904 0.35211718
  0.444933   0.23220071 0.2804388  0.39152142 0.47416234 0.47300994]
 [0.23618107 0.4560926  0.2187718  0.3608191  0.4006248  0.36857402
  0.29350886 0.32213783 0.52231836 0.28074774 0.27146202 0.38805228
  0.457649   0.2793988  0.2880401  0.43243733 0.43548077 0.4694621 ]
 [0.29733557 0.3073203  0.22550464 0.40463    0.37438253 0.34977707
  0.2419825  0.2316681  0.47335702 0.31417292 0.24571441 0.38621306
  0.40680957 0.29933095 0.24346681 0.38924247 0.38601106 0.45367107]
 [0.19368178 0.20833313 0.1874969  0.20518512 0.35852206 0.34988475
  0.23991661 0.24428672 0.4124299  0.2840969  0.25524732 0.3199762
  0.36189604 0.23155211 0.18904749 0.33116335 0.39306152 0.39744344]
 [0.19200389 0.1441665  0.1805124  0.15738149 0.33208826 0.3559284
  0.22760051 0.19541456 0.38462254 0.32856277 0.23724595 0.30120096
  0.32984483 0.20262282 0.16012087 0.31470415 0.33125913 0.36726445]
 [0.18908045 0.14672425 0.20273098 0.15415402 0.3042259  0.33648795
  0.20489289 0.22542322 0.3576605  0.27366915 0.23495172 0.27305752
  0.30736807 0.20215446 0.16259012 0.28926277 0.33198544 0.36120635]
 [0.19324383 0.14809573 0.20890084 0.15583996 0.30497652 0.33336937
  0.18971105 0.2269822  0.3741697  0.28101626 0.23385055 0.27942324
  0.30704576 0.20896293 0.16389427 0.29238698 0.32896245 0.3633918 ]
 [0.19889992 0.14887506 0.20938313 0.16274743 0.32116047 0.3339147
  0.18650891 0.23949116 0.39211553 0.2790553  0.24062544 0.28417498
  0.31820613 0.2142381  0.16553016 0.29594463 0.35162768 0.37017056]
 [0.21487843 0.14069793 0.20946148 0.16282003 0.32712293 0.33361274
  0.17219217 0.24825123 0.40302128 0.27768344 0.24375358 0.27639037
  0.31966335 0.2223245  0.1739687  0.2924428  0.36087748 0.3618455 ]
 [0.21190989 0.55778444 0.19327034 0.15311156 0.40786326 0.40044114
  0.25157303 0.22052318 0.5054195  0.23913118 0.28012916 0.3792906
  0.47379684 0.21124607 0.37420002 0.42948073 0.406056   0.3753477 ]
 [0.22510219 0.62292546 0.1649898  0.13595861 0.41824934 0.4154642
  0.26948655 0.21414936 0.51243496 0.22247636 0.28685427 0.37885112
  0.4913301  0.19349152 0.41810063 0.43940935 0.40469423 0.34954104]
 [0.41052705 0.96916807 0.16414022 0.08115268 0.40374327 0.50760853
  0.8358452  0.23884124 0.62254727 0.08992083 0.33935523 0.45551875
  0.59939045 0.18213566 0.61871445 0.48850596 0.46698028 0.5861966 ]
 [0.24170339 1.0021881  0.3028626  0.0292912  0.36708188 0.50702935
  0.9582894  0.30655718 0.63814765 0.10391901 0.360977   0.47546923
  0.6201753  0.17323357 0.6359745  0.53872275 0.47994635 0.71390283]
 [0.15895873 0.3625182  0.5977533  0.07423293 0.27510172 0.42906737
  0.63297856 0.25243223 0.6412461  0.42124984 0.31739867 0.44473284
  0.51585436 0.21844518 0.51752883 0.52910554 0.38345927 0.7471607 ]
 [0.16708443 0.1654272  0.646562   0.12240162 0.267495   0.38613003
  0.32728857 0.24194452 0.65700936 0.52913237 0.29095587 0.41531196
  0.47899657 0.2931406  0.4855981  0.5163567  0.34964615 0.65968287]
 [0.22330956 0.04218784 0.6938733  0.535632   0.24958275 0.33203185
  0.14962813 0.2567893  0.657096   0.65814203 0.25703207 0.44930106
  0.52171004 0.7586262  0.5141986  0.55391693 0.29085976 0.6676141 ]
 [0.28717107 0.04721984 0.73583317 0.76241374 0.2778252  0.30826762
  0.06672613 0.2902227  0.70703095 0.7224736  0.25715888 0.48837292
  0.58895826 0.97416615 0.58537716 0.6112357  0.29413912 0.6868582 ]
 [0.21331069 0.08627589 0.7235569  0.34301078 0.26656777 0.3619568
  0.19904546 0.25595436 0.6936458  0.6292294  0.2870167  0.45114374
  0.54881895 0.6646007  0.5571319  0.5635011  0.33539093 0.6904329 ]
 [0.17130822 0.11490302 0.5811444  0.16908604 0.26333237 0.37067813
  0.24515444 0.2230018  0.6150265  0.55535233 0.26693034 0.39045194
  0.4450451  0.31964976 0.4354776  0.4909673  0.3176393  0.58352935]
 [0.25735548 0.15014191 0.5609293  0.12029229 0.2757862  0.37062815
  0.260493   0.22842446 0.62111855 0.46102977 0.2729531  0.36732656
  0.415488   0.27261132 0.42697734 0.44432333 0.3376351  0.5731586 ]
 [0.18583566 0.11795996 0.547311   0.12303333 0.25963303 0.3520349
  0.22512093 0.23357318 0.581025   0.46372312 0.26032266 0.35522038
  0.38410592 0.25685936 0.3576163  0.4242421  0.3148385  0.5608233 ]
 [0.19980338 0.09946582 0.6095639  0.16934365 0.23381846 0.31515604
  0.1814025  0.24644709 0.5890489  0.42844138 0.24012399 0.357692
  0.3300941  0.25973713 0.29125452 0.39284992 0.2775485  0.6103831 ]
 [0.32748145 0.09596572 0.54278064 0.12525305 0.20767973 0.26829922
  0.12725678 0.21131063 0.5071935  0.25231552 0.19169939 0.30015153
  0.19636655 0.2255141  0.09619452 0.24341981 0.2023231  0.5641645 ]
 [0.56962276 0.05112794 0.513343   0.0796104  0.18512592 0.25002843
  0.1295312  0.15518127 0.39623997 0.12040134 0.16490975 0.21124786
  0.15634367 0.23990902 0.11826975 0.1506066  0.15104532 0.46737954]
 [0.36344722 0.05782546 0.38050193 0.06180333 0.1651665  0.2561234
  0.14012218 0.15950033 0.24889117 0.13666755 0.15859063 0.16637097
  0.15161934 0.21058816 0.07932234 0.15124202 0.13523544 0.35824615]
 [0.19218479 0.08844419 0.20705776 0.09359676 0.23380545 0.309649
  0.16395265 0.19390559 0.24883997 0.23564485 0.19893558 0.1959851
  0.23325023 0.19119744 0.11862984 0.2270053  0.23297025 0.29367253]
 [0.23294762 0.19906019 0.17259818 0.25840896 0.35137436 0.3504445
  0.25347537 0.24940622 0.39417496 0.2799461  0.25505173 0.2987482
  0.36991903 0.3069796  0.20049551 0.32959455 0.38044795 0.39087048]
 [0.1837983  0.1208491  0.20046116 0.12239908 0.25977856 0.32280213
  0.17726111 0.20534971 0.28779343 0.25315598 0.21491586 0.22673717
  0.26316872 0.1955077  0.1441352  0.2576556  0.2717516  0.3161261 ]
 [0.20033655 0.13706025 0.18066919 0.11533102 0.27442047 0.33130094
  0.16738509 0.21619233 0.30521074 0.243908   0.22193272 0.20958579
  0.26571715 0.1707518  0.169506   0.25988176 0.27981105 0.29302433]
 [0.20142467 0.09548455 0.19794464 0.13929217 0.2566249  0.347401
  0.150996   0.18455654 0.35361165 0.36710057 0.20577084 0.23278104
  0.25762057 0.2023207  0.15867966 0.28541854 0.2334436  0.297601  ]
 [0.21810022 0.12322434 0.22985464 0.16739133 0.29167157 0.34508842
  0.14585567 0.21911809 0.4171132  0.36003116 0.22222894 0.2677274
  0.3031581  0.21204236 0.18711543 0.30837986 0.28846174 0.34105626]
 [0.19270694 0.10813418 0.19271037 0.1235953  0.2719482  0.32562417
  0.16485745 0.20370382 0.30916286 0.27199313 0.21515816 0.22971469
  0.2640923  0.19733867 0.13358636 0.2574157  0.27908128 0.31614164]
 [0.1804373  0.10730996 0.18717    0.13082656 0.26605222 0.32287967
  0.17507699 0.20068441 0.2738641  0.24747583 0.2143651  0.21922943
  0.25820255 0.1878691  0.12657848 0.24902195 0.27328128 0.30889386]
 [0.19314638 0.08048765 0.21330416 0.09243786 0.21737878 0.30998793
  0.15283114 0.18943056 0.25092164 0.25304946 0.19392242 0.18867329
  0.21460836 0.1845532  0.1123221  0.22353223 0.2143456  0.28950098]
 [0.18239775 0.06456937 0.2012204  0.10861927 0.21942814 0.3351583
  0.1498525  0.19384304 0.31189665 0.3684331  0.19246134 0.21370512
  0.22696242 0.19690172 0.11136427 0.25881267 0.20335475 0.2937882 ]
 [0.19531767 0.07814156 0.22005953 0.09073202 0.20990662 0.3054748
  0.15084617 0.18545218 0.24075663 0.24489087 0.19027446 0.18275294
  0.2065314  0.18261091 0.10896237 0.21740073 0.20473868 0.28755578]
 [0.18038306 0.06470387 0.20151445 0.10406843 0.21054785 0.32920122
  0.14872937 0.19603822 0.29311866 0.35020453 0.1901159  0.20318949
  0.21707752 0.19069552 0.10659704 0.24960056 0.19697869 0.28936362]
 [0.18653601 0.08077081 0.20436344 0.09965    0.21698564 0.3116664
  0.15401718 0.18797785 0.24798417 0.2598652  0.19401386 0.18850288
  0.21335839 0.18031591 0.11483771 0.22815712 0.2137658  0.28574982]
 [0.18332976 0.07484062 0.20930216 0.11284177 0.23158696 0.3307186
  0.15534995 0.19924387 0.31511942 0.34035495 0.20028476 0.2204395
  0.23985781 0.19837856 0.12404811 0.25928617 0.22706158 0.3054897 ]
 [0.18247552 0.06827998 0.19924441 0.10742837 0.22555043 0.33444038
  0.15173472 0.19819964 0.3152599  0.35901314 0.19597436 0.21734913
  0.23176695 0.19864307 0.11068254 0.2567081  0.21527863 0.29880685]
 [0.18402006 0.07699898 0.2073685  0.11106884 0.22538444 0.32600683
  0.15078199 0.20837633 0.3086033  0.32851797 0.19874412 0.21260841
  0.23010062 0.19119991 0.11619239 0.25125873 0.22363207 0.30348283]] (740, 48, 18)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.16 RMSE
Test Score: 0.10 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(75, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))

#model.add(Dropout(.01))
model.add((Dense(trainX.shape[2])))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_12 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_21"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_12 (LSTM)               (None, 48, 75)            28200     
_________________________________________________________________
dense_50 (Dense)             (None, 48, 18)            1368      
=================================================================
Total params: 29,568
Trainable params: 29,568
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C4 = model.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=32, callbacks=[lr_decay])
Epoch 1/30
96/96 [==============================] - 11s 111ms/step - loss: 0.0370 - mae: 0.1250 - val_loss: 7.0449 - val_mae: 0.3087
Epoch 2/30
96/96 [==============================] - 11s 114ms/step - loss: 0.0363 - mae: 0.1238 - val_loss: 10.0080 - val_mae: 0.3612
Epoch 3/30
96/96 [==============================] - 11s 110ms/step - loss: 0.0356 - mae: 0.1228 - val_loss: 0.1176 - val_mae: 0.1778
Epoch 4/30
96/96 [==============================] - 11s 115ms/step - loss: 0.0352 - mae: 0.1220 - val_loss: 0.2332 - val_mae: 0.1891
Epoch 5/30
96/96 [==============================] - 11s 111ms/step - loss: 0.0348 - mae: 0.1216 - val_loss: 0.0760 - val_mae: 0.1724
Epoch 6/30
96/96 [==============================] - 11s 109ms/step - loss: 0.0345 - mae: 0.1210 - val_loss: 0.0659 - val_mae: 0.1699
Epoch 7/30
96/96 [==============================] - 11s 111ms/step - loss: 0.0343 - mae: 0.1206 - val_loss: 0.0635 - val_mae: 0.1689
Epoch 8/30
96/96 [==============================] - 11s 112ms/step - loss: 0.0341 - mae: 0.1203 - val_loss: 0.0690 - val_mae: 0.1717
Epoch 9/30
96/96 [==============================] - 10s 109ms/step - loss: 0.0340 - mae: 0.1199 - val_loss: 0.1810 - val_mae: 0.1888
Epoch 10/30
96/96 [==============================] - 11s 111ms/step - loss: 0.0339 - mae: 0.1199 - val_loss: 0.0762 - val_mae: 0.1741
Epoch 11/30
96/96 [==============================] - 11s 113ms/step - loss: 0.0337 - mae: 0.1196 - val_loss: 0.0725 - val_mae: 0.1728
Epoch 12/30
96/96 [==============================] - 11s 111ms/step - loss: 0.0337 - mae: 0.1195 - val_loss: 0.0882 - val_mae: 0.1775
Epoch 13/30
96/96 [==============================] - 11s 114ms/step - loss: 0.0336 - mae: 0.1194 - val_loss: 0.2489 - val_mae: 0.1920
Epoch 14/30
96/96 [==============================] - 11s 112ms/step - loss: 0.0335 - mae: 0.1193 - val_loss: 0.1117 - val_mae: 0.1803
Epoch 15/30
96/96 [==============================] - 11s 113ms/step - loss: 0.0335 - mae: 0.1193 - val_loss: 0.1349 - val_mae: 0.1829
Epoch 16/30
96/96 [==============================] - 11s 113ms/step - loss: 0.0335 - mae: 0.1192 - val_loss: 0.2247 - val_mae: 0.1904
Epoch 17/30
96/96 [==============================] - 11s 110ms/step - loss: 0.0334 - mae: 0.1192 - val_loss: 0.1475 - val_mae: 0.1838
Epoch 18/30
96/96 [==============================] - 11s 113ms/step - loss: 0.0334 - mae: 0.1191 - val_loss: 0.1237 - val_mae: 0.1814
Epoch 19/30
96/96 [==============================] - 11s 111ms/step - loss: 0.0334 - mae: 0.1191 - val_loss: 0.1755 - val_mae: 0.1857
Epoch 20/30
96/96 [==============================] - 11s 113ms/step - loss: 0.0334 - mae: 0.1190 - val_loss: 0.1743 - val_mae: 0.1856
Epoch 21/30
96/96 [==============================] - 11s 112ms/step - loss: 0.0334 - mae: 0.1190 - val_loss: 0.1374 - val_mae: 0.1827
Epoch 22/30
96/96 [==============================] - 11s 112ms/step - loss: 0.0334 - mae: 0.1190 - val_loss: 0.1824 - val_mae: 0.1860
Epoch 23/30
96/96 [==============================] - 11s 113ms/step - loss: 0.0334 - mae: 0.1190 - val_loss: 0.1380 - val_mae: 0.1825
Epoch 24/30
96/96 [==============================] - 11s 113ms/step - loss: 0.0333 - mae: 0.1190 - val_loss: 0.1623 - val_mae: 0.1848
Epoch 25/30
96/96 [==============================] - 11s 112ms/step - loss: 0.0333 - mae: 0.1190 - val_loss: 0.2101 - val_mae: 0.1879
Epoch 26/30
96/96 [==============================] - 11s 111ms/step - loss: 0.0333 - mae: 0.1190 - val_loss: 0.1813 - val_mae: 0.1861
Epoch 27/30
96/96 [==============================] - 11s 110ms/step - loss: 0.0333 - mae: 0.1190 - val_loss: 0.1903 - val_mae: 0.1866
Epoch 28/30
96/96 [==============================] - 11s 112ms/step - loss: 0.0333 - mae: 0.1190 - val_loss: 0.1867 - val_mae: 0.1864
Epoch 29/30
96/96 [==============================] - 11s 110ms/step - loss: 0.0333 - mae: 0.1190 - val_loss: 0.1850 - val_mae: 0.1863
Epoch 30/30
96/96 [==============================] - 11s 110ms/step - loss: 0.0333 - mae: 0.1190 - val_loss: 0.1820 - val_mae: 0.1861
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C4.history['loss'], label='train')
plt.plot(history_C4.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C4 = model.predict(trainX, verbose = 1)
print(testingtrain_C4[0], testingtrain_C4.shape)

testingtest_C4 = model.predict(testX, verbose = 1)
print(testingtest_C4[0], testingtest_C4.shape)
106/106 [==============================] - 1s 11ms/step
[[ 0.34847865  0.33134747  0.65411437  0.31734902  0.23357093  0.43364936
   0.6308432   0.6235368   0.7626569   0.39810959  0.34588036  0.5087725
   0.5158555   0.42815256  0.46478593  0.47605717  0.44181588  0.7779535 ]
 [ 0.3418491   0.3140387   0.73476034  0.17357278  0.23937747  0.34940225
   0.7178322   0.70318604  0.7355412   0.3449066   0.36253443  0.4977805
   0.48538512  0.4447646   0.4743549   0.4620262   0.48352608  0.9269818 ]
 [ 0.19935729  0.16936828  0.7688603   0.21440336  0.22941566  0.23829818
   0.39117375  0.67130417  0.75976914  0.55334675  0.3519031   0.45777342
   0.47869813  0.45964047  0.5381393   0.51926935  0.4950239   0.7679342 ]
 [ 0.20805061  0.0952634   0.86556274  0.82174104  0.22263584  0.13701183
   0.22643115  0.7046214   0.77698207  0.74569535  0.31351656  0.55884236
   0.53111243  0.6803082   0.6360248   0.61318076  0.4562818   0.8066846 ]
 [ 0.26185757  0.151995    0.88851535  1.0948297   0.24266195  0.13277578
   0.23086202  0.6758188   0.7421122   0.8030253   0.2764188   0.5410931
   0.5576587   1.0927806   0.61361253  0.5924508   0.3291512   0.8374896 ]
 [ 0.45275113  0.170626    0.8074373   1.1030734   0.18431689  0.14563492
   0.18558818  0.61044765  0.7343758   0.7883865   0.22256511  0.5168352
   0.5223893   1.0346022   0.58745134  0.5868682   0.2448464   0.7947382 ]
 [ 0.8899257   0.11324693  0.85164714  1.1210176   0.1796245   0.11511088
   0.13114327  0.5981216   0.7745789   0.76892895  0.1809765   0.5006995
   0.5137891   0.81752366  0.56325537  0.6141901   0.18733436  0.7557893 ]
 [ 0.6510505   0.13272086  0.87393206  1.0337298   0.13216612  0.09269217
   0.1302394   0.5917916   0.77631366  0.76534325  0.15393676  0.48224288
   0.46042627  0.3140118   0.3231063   0.65263855  0.12763433  0.78641427]
 [ 0.23820718  0.08750099  0.871309    1.0349058   0.15380053  0.09789036
   0.1409311   0.49169385  0.82121813  0.7657191   0.14269383  0.45105255
   0.28952914  0.26204446  0.09707496  0.64048797  0.15830964  0.8108456 ]
 [ 0.3484182   0.04713715  0.822768    0.47285593  0.1405981   0.17176345
   0.18190941  0.1923962   0.8604632   0.5130125   0.15995261  0.39808008
   0.18033673  0.27605885  0.08040015  0.46110344  0.10978624  0.78863573]
 [ 1.1440355   0.01537403  0.9725869   0.1352014   0.099021    0.14543122
   0.1538805   0.19701353  0.75180286  0.150821    0.1643925   0.29307368
   0.05747473  0.3420913   0.06657937  0.26177296  0.11595514  0.76373076]
 [ 0.97625196 -0.05650917  0.67839617  0.06499325  0.17397478  0.16779408
   0.11286445  0.26243445  0.59325665  0.17075747  0.1659667   0.1619615
   0.14575846  0.43862197  0.02826215  0.18781371  0.15274948  0.73916876]
 [ 0.20569073  0.01610589  0.31515935  0.07864746  0.21440002  0.19019502
   0.18015504  0.23854828  0.291225    0.243352    0.14408192  0.10024306
   0.25839987  0.36454642  0.05742385  0.21600088  0.13993308  0.47564507]
 [ 0.08544365  0.09131619  0.2887347   0.10043544  0.15442319  0.21782547
   0.23000799  0.20389822  0.22127484  0.24953985  0.1449042   0.15224344
   0.21087763  0.34481093  0.11083049  0.2528077   0.1425038   0.3285407 ]
 [ 0.08317327  0.0947925   0.32517835  0.09231099  0.13515979  0.2553501
   0.22505641  0.24347016  0.20984045  0.3038675   0.17442375  0.24317047
   0.14947724  0.32046384  0.10494517  0.2900728   0.11227387  0.3502316 ]
 [ 0.16054313  0.09560767  0.33659482  0.0773498   0.08991855  0.3105456
   0.23461077  0.21875295  0.32534984  0.51692873  0.19519961  0.2883797
   0.08197443  0.2367531   0.08571889  0.36495656  0.10658281  0.34908628]
 [ 0.1805403   0.08548099  0.3825938   0.1276664   0.0623092   0.29517904
   0.23418204  0.2869788   0.47734737  0.7146841   0.20027113  0.28361437
   0.16298391  0.2318852   0.09380515  0.408481    0.1076536   0.32282996]
 [ 0.2342205   0.09396447  0.45298535  0.07556739  0.05490331  0.30889943
   0.26672685  0.26204506  0.49936166  0.62722504  0.2290968   0.2512898
   0.26459187  0.2883045   0.07346872  0.33627543  0.12726155  0.34231573]
 [ 0.08429602  0.05446457  0.3627776   0.08133876  0.07868889  0.33209035
   0.17139095  0.26393643  0.5913636   0.62916017  0.20903221  0.23580348
   0.30942863  0.24097818  0.09597186  0.36630648  0.15646663  0.3343974 ]
 [ 0.05422398  0.02079626  0.32448918  0.18640244  0.09848262  0.26881352
   0.18338734  0.26186875  0.59733534  0.6050816   0.15777396  0.2500209
   0.3037163   0.22575873  0.1105237   0.33047232  0.13651934  0.26307046]
 [ 0.09196596  0.08807383  0.31267452  0.1585112   0.11789043  0.26248613
   0.23312306  0.23146513  0.5041573   0.41968325  0.15818948  0.28468102
   0.26804104  0.21431062  0.07473945  0.2834401   0.12220996  0.31638336]
 [ 0.13217846  0.11443327  0.3264953   0.12816831  0.17423199  0.24494165
   0.15588693  0.28081647  0.56759614  0.34461626  0.16233328  0.28579682
   0.2720118   0.22910345  0.07532335  0.30073807  0.14396322  0.3373574 ]
 [ 0.21500385  0.12751482  0.28577104  0.08369857  0.2375808   0.34657526
   0.1044227   0.26541248  0.49740314  0.19383366  0.19292848  0.28196552
   0.2484152   0.18894652  0.04952124  0.27381817  0.12264827  0.37146518]
 [ 0.2632322   0.1507427   0.2883529   0.12740031  0.24865776  0.36333442
   0.16287659  0.33020902  0.36288255  0.14863801  0.22062725  0.29237187
   0.26693904  0.216777    0.04198724  0.23528501  0.13958171  0.35810918]
 [ 0.3399088   0.1473474   0.2670682   0.17358251  0.23044679  0.38084164
   0.21531919  0.30526033  0.31575292  0.11237119  0.22252443  0.28735822
   0.27250075  0.24045318  0.05526621  0.17616092  0.18310496  0.3403919 ]
 [ 0.25240648  0.15510319  0.25871277  0.138314    0.24829492  0.3627598
   0.21494311  0.31295305  0.41357905  0.15189904  0.24906781  0.28543872
   0.33506662  0.24576437  0.07943618  0.1902549   0.20777261  0.35196602]
 [ 0.23091689  0.16062155  0.23367636  0.16809016  0.2916716   0.356799
   0.24952063  0.33074895  0.35604241  0.15132141  0.26331368  0.2617199
   0.33337736  0.22067288  0.08416681  0.21180502  0.2850589   0.3407206 ]
 [ 0.24899274  0.16250238  0.21913612  0.23594972  0.31488836  0.3506473
   0.2412294   0.3273872   0.3195186   0.20313501  0.26978332  0.2603371
   0.34396207  0.23825586  0.11712218  0.21768358  0.33421206  0.3052129 ]
 [ 0.2445238   0.15495665  0.23306358  0.14594238  0.3443367   0.36594078
   0.22951688  0.32177934  0.3804077   0.21731868  0.263118    0.24284294
   0.37365097  0.21614856  0.1608431   0.24279547  0.35928583  0.32501844]
 [ 0.36530063  0.11933941  0.24191387  0.13711971  0.3798794   0.3970783
   0.1932864   0.33411437  0.38301286  0.32179585  0.22745132  0.24617705
   0.44628388  0.17474449  0.15822716  0.28933722  0.36343682  0.29629177]
 [ 0.35283354  0.16973284  0.32038817  0.12735957  0.34442112  0.4156945
   0.25498787  0.3509275   0.363941    0.280709    0.22864467  0.29564148
   0.5140261   0.19762686  0.15194464  0.28302035  0.41505465  0.35612985]
 [ 0.34919322  0.17838205  0.33167112  0.12766904  0.37570184  0.45494598
   0.23947774  0.35950467  0.3705045   0.32796362  0.24756625  0.33266392
   0.56026465  0.20969385  0.15113808  0.30796972  0.46071324  0.36606863]
 [ 0.32291457  0.16629517  0.38303024  0.125577    0.3809331   0.4931411
   0.22338356  0.3682088   0.394422    0.35661086  0.26634586  0.34238312
   0.6037056   0.2067934   0.14332408  0.3073871   0.46010503  0.35250527]
 [ 0.21798988  0.11540095  0.6102818   0.16282363  0.3144725   0.47666278
   0.1759832   0.47531536  0.5393664   0.4900721   0.21954602  0.3722869
   0.7031867   0.2615778   0.14886937  0.40616247  0.41817886  0.30876547]
 [ 0.08335446  0.21383038  0.5614669   0.21719256  0.3460422   0.4838092
   0.19149628  0.4022915   0.4735296   0.42879137  0.22490281  0.46445355
   0.7185363   0.2842558   0.09345563  0.3685152   0.34108734  0.35925847]
 [ 0.08089271  0.28765774  0.51946294  0.22982146  0.39693376  0.50470096
   0.16620892  0.37286606  0.43220106  0.43629283  0.24334711  0.5176797
   0.6959939   0.2559018   0.12735943  0.4329223   0.3250874   0.34848222]
 [ 0.47879645  0.38431776  0.56431365  0.45842996  0.45748824  0.48236024
   0.19119045  0.5711216   0.57522243  0.5032646   0.21682718  0.48962805
   0.73347735  0.29123238  0.22217783  0.51210964  0.34812853  0.38528964]
 [ 0.17516434  0.37262008  0.4055838   0.46575764  0.5139565   0.46739784
   0.25080785  0.35114303  0.46674645  0.5637994   0.17980854  0.6329856
   0.66756165  0.2373271   0.1265751   0.43032467  0.36504763  0.46878964]
 [ 0.16347617  0.3908533   0.38025814  0.27764642  0.5826107   0.4458446
   0.24955896  0.20501965  0.64444405  0.48922017  0.23426813  0.69631904
   0.5910962   0.25760433  0.21913907  0.38005137  0.35878944  0.53312135]
 [ 0.22020055  0.2600699   0.31061524  0.32360616  0.5645588   0.38031894
   0.24218129  0.19906187  0.4296529   0.50737995  0.21230534  0.69564044
   0.46474612  0.26558518  0.08472008  0.35983905  0.24866226  0.4467327 ]
 [ 0.2096671   0.23482531  0.26408225  0.26869065  0.5386773   0.35771352
   0.20471397  0.24225986  0.5241196   0.5202622   0.21066988  0.6159477
   0.46473503  0.2868956   0.10860452  0.3463262   0.23379219  0.41253662]
 [ 0.23897599  0.24733575  0.28724954  0.28174835  0.5591221   0.33139583
   0.18386164  0.2717507   0.47897702  0.42710137  0.24673077  0.5501475
   0.43327808  0.35220513  0.09861016  0.30110252  0.23061791  0.42305177]
 [ 0.27280575  0.25460485  0.27860898  0.27022946  0.5545407   0.35014674
   0.15832692  0.29080808  0.47802165  0.4245692   0.27289504  0.49621618
   0.42367506  0.3982271   0.14388123  0.26831552  0.22983342  0.41203922]
 [ 0.22618683  0.26633063  0.22371626  0.2586473   0.5419657   0.37248215
   0.16383788  0.2356492   0.4365075   0.4203273   0.29450312  0.50340825
   0.42613196  0.41465756  0.17743622  0.24150246  0.2038342   0.4095251 ]
 [ 0.4055438   0.3646634   0.24833822  0.2845343   0.5389327   0.4262016
   0.05762761  0.26860774  0.75293505  0.47693896  0.33378792  0.42239958
   0.49850416  0.48047763  0.4577661   0.31364518  0.26702648  0.3715566 ]
 [ 0.7912903   0.42955443  0.40171945  0.18748957  0.512231    0.44228274
   0.1651545   0.5097798   0.9575568   0.4343647   0.33928603  0.27215588
   0.52779955  0.5506473   0.6188229   0.32084104  0.33014777  0.36963356]
 [ 0.77317774  0.85334945  0.37428808  0.19134654  0.45341834  0.49440628
   0.40516302  0.6582526   0.9518429   0.32874262  0.360472    0.30488554
   0.6238083   0.5165055   0.72678936  0.33201575  0.30744606  0.40833205]
 [ 0.2428259   1.1739423   0.3592953   0.13307332  0.41155222  0.5518261
   0.87900645  0.6148514   0.8282809   0.18471399  0.39855796  0.43423486
   0.6789812   0.4680179   0.6726862   0.41202185  0.3102579   0.7051426 ]] (3388, 48, 18)
24/24 [==============================] - 0s 11ms/step
[[ 0.13117734  0.0758011   0.15309733  0.0507622   0.16889064  0.21233009
   0.12576497  0.13878669  0.18333277  0.15977812  0.1445106   0.15356854
   0.1366197   0.10630357  0.07402985  0.1544674   0.1631361   0.22769311]
 [ 0.17163649  0.07640116  0.17439654  0.04515506  0.2026624   0.2646847
   0.13200665  0.15757364  0.24254532  0.18637083  0.17020819  0.17341274
   0.16559684  0.12987491  0.09627961  0.17477241  0.20688939  0.27247697]
 [ 0.15804364  0.06642211  0.17335773  0.04413388  0.21802063  0.285682
   0.11901797  0.1651155   0.24918582  0.20202464  0.18104987  0.17089137
   0.17748156  0.12835467  0.10076759  0.18562141  0.22520256  0.27785146]
 [ 0.22649254  0.07338894  0.13156861  0.03802335  0.2819645   0.3315023
   0.13668008  0.1647041   0.25608477  0.2038526   0.21451804  0.18078339
   0.20306718  0.12161655  0.09906939  0.20423844  0.291152    0.2895578 ]
 [ 0.21916278  0.09986988  0.16790257  0.08024482  0.28722867  0.39172268
   0.19846861  0.23661533  0.2666722   0.23059964  0.24290937  0.21464443
   0.31151873  0.13648811  0.12309771  0.25917712  0.36728352  0.33026335]
 [ 0.21827348  0.11786456  0.19118297  0.44673747  0.43682247  0.41283894
   0.22198527  0.3429384   0.37741342  0.29292214  0.2587819   0.24775085
   0.41642928  0.28643155  0.20728539  0.31442738  0.43451846  0.39076725]
 [ 0.2157102   0.16383117  0.23048453  0.3434554   0.41342562  0.36033124
   0.31508029  0.45643523  0.4109433   0.2707257   0.26118174  0.21874169
   0.39269513  0.26495254  0.15199976  0.34415874  0.546174    0.411489  ]
 [ 0.17213863  0.17491776  0.25666803  0.21602651  0.47036326  0.35496524
   0.3801129   0.46737728  0.40377197  0.29163063  0.3003014   0.3364574
   0.44571602  0.23899058  0.13110793  0.40939516  0.7055506   0.58243483]
 [ 0.21629553  0.4033263   0.15019205  0.5675644   0.5504516   0.34171265
   0.30939245  0.3998516   0.40718436  0.30507347  0.3146609   0.3580372
   0.4541763   0.23612463  0.18324181  0.38673168  0.6943644   0.44949582]
 [ 0.31521967  0.3257936   0.0341415   0.67571574  0.65877795  0.28280157
   0.22563706  0.2979796   0.2833564   0.43295714  0.31820315  0.48571384
   0.40883917  0.25590363  0.13545863  0.3356129   0.65787256  0.41393006]
 [ 0.13260163  0.18829393  0.06075709  0.41972744  0.63591945  0.25035936
   0.19022645  0.2703054   0.33653277  0.43727946  0.31919038  0.40202722
   0.38285345  0.19885963  0.11372313  0.24225736  0.59500194  0.43632415]
 [ 0.19494265  0.10744124  0.03122512  0.22051099  0.6635716   0.299599
   0.23998548  0.24007618  0.3427019   0.4586428   0.31599307  0.40690562
   0.35295618  0.20401472  0.15110043  0.19127217  0.50490135  0.5084563 ]
 [ 0.20538355  0.1776398   0.09133969  0.21595067  0.60304004  0.26004192
   0.17266461  0.2727229   0.3485043   0.35361925  0.3246765   0.32741028
   0.3931502   0.22666562  0.21387348  0.18047987  0.41707882  0.4177891 ]
 [ 0.17579839  0.202957    0.07693715  0.16172037  0.61446387  0.25604188
   0.10939739  0.28002653  0.36047354  0.3728614   0.31844205  0.33194837
   0.42469913  0.26303697  0.23307715  0.1673376   0.31900775  0.41233164]
 [ 0.25785887  0.2575331   0.08031096  0.18745022  0.6230944   0.3002853
   0.04298996  0.30432186  0.479922    0.31701487  0.32352728  0.25107655
   0.46010596  0.29810685  0.2888096   0.1777531   0.30047607  0.3735202 ]
 [ 0.24719875  0.29173166  0.0796576   0.19716373  0.6299565   0.33351755
  -0.01108192  0.31738722  0.5698645   0.30134824  0.3311007   0.17885888
   0.4725243   0.25044426  0.3176718   0.19458114  0.2740989   0.30485868]
 [ 0.18259317  0.7014234   0.03204051  0.25375155  0.6102608   0.41660488
   0.08313066  0.30624163  0.5930877   0.29171807  0.3281374   0.33016884
   0.6992241   0.29594794  0.43449008  0.3070716   0.11850989  0.26687017]
 [ 0.21299316  0.9216856  -0.00285492  0.2769859   0.58280766  0.5365615
   0.2693792   0.20360169  0.6289103   0.2902177   0.3177779   0.42148593
   0.79645836  0.35090074  0.52558947  0.38771895  0.05415599  0.2532816 ]
 [ 0.6578885   1.223114   -0.06717199  0.28804386  0.39395908  0.70171404
   0.99893385  0.1137649   0.673345    0.15622692  0.36546227  0.48134995
   0.85519916  0.4471778   0.8044863   0.35777643  0.07691365  0.45909992]
 [ 0.2078171   1.3796867   0.04966539  0.19151251  0.32704952  0.74287075
   1.3384799   0.11552538  0.7012414   0.09172019  0.3548065   0.49967253
   0.912934    0.3180185   0.72891116  0.5334983   0.02458486  0.6419207 ]
 [ 0.21560748  0.643018    0.6231614   0.05301955  0.32052097  0.74115145
   1.3446318   0.09216184  0.72355986  0.47684827  0.3142345   0.4104172
   0.8032457   0.23517454  0.64134663  0.51292694  0.18143353  0.69264483]
 [ 0.24239369  0.30239305  0.7925959   0.03869272  0.30522734  0.63540924
   0.9255553   0.09283143  0.7866515   0.77139455  0.29747418  0.47125804
   0.7604904   0.27333018  0.72056204  0.62265015  0.22194862  0.65264285]
 [ 0.10069317  0.14113192  0.89477515  0.41009447  0.31767973  0.5595585
   0.63950247  0.02046614  0.74280584  0.907367    0.24129653  0.5110167
   0.69095117  1.0439034   0.6823191   0.6054354   0.14252669  0.7436552 ]
 [ 0.20825778  0.1642515   0.8328355   0.78206646  0.32195354  0.41058436
   0.33040917  0.00419916  0.8007187   0.88011247  0.23874113  0.53682303
   0.7442105   1.202602    0.7216648   0.61708355  0.07273889  0.6624793 ]
 [ 0.23241195  0.2843055   0.82586837  0.2071198   0.22933537  0.38944632
   0.21273935  0.0052818   0.88942105  0.73485893  0.3097943   0.48952827
   0.8442255   0.6267203   0.71539485  0.5064064  -0.02397387  0.59534156]
 [ 0.36823085  0.26339442  0.9156047   0.03423399  0.26284206  0.3617153
   0.22863398  0.03377045  0.8670545   0.74600536  0.30492646  0.4838062
   0.72539073  0.09984898  0.76412696  0.47874704 -0.0618285   0.5749982 ]
 [ 0.68232924  0.20416027  0.86001706  0.04554736  0.3384148   0.34032053
   0.26129135 -0.04906417  0.76156837  0.5550352   0.22409862  0.5345115
   0.5449951   0.03023119  0.7928136   0.32719514 -0.0684802   0.5973301 ]
 [ 0.36168316  0.17576858  0.818869    0.0273532   0.3341294   0.3711669
   0.21434458 -0.04429653  0.683894    0.42586118  0.15710568  0.5194404
   0.3989858   0.0445471   0.56129056  0.22472432 -0.06596003  0.62725985]
 [ 0.31094044  0.13355932  0.8297273   0.09558588  0.31284022  0.346982
   0.1994843  -0.0380014   0.548717    0.30727938  0.1364014   0.45931345
   0.24016467  0.11326713  0.43588188  0.18900824 -0.02156863  0.5581616 ]
 [ 0.32690263  0.0825187   0.73062414  0.02419622  0.22830364  0.3168521
   0.2615594   0.05573602  0.4564764   0.1435302   0.14220378  0.32769975
   0.14047417  0.17930442  0.24213162  0.15263611  0.03439446  0.45384115]
 [ 0.7270749   0.05370483  0.6503301  -0.02442172  0.2302508   0.23188272
   0.30176404  0.10081147  0.25164726  0.151044    0.15169504  0.18850997
   0.19503787  0.25785083  0.1330802   0.1019168  -0.00211518  0.15566625]
 [ 0.5132375   0.05318334  0.482181    0.005574    0.2608612   0.19962737
   0.34036925  0.218867    0.19259757  0.24010018  0.17704585  0.10894841
   0.2815843   0.36770386  0.03774735  0.10620506 -0.01113864  0.06075065]
 [ 0.29370207  0.06991947  0.19966966  0.12365515  0.3144956   0.27042365
   0.3565947   0.24869701  0.14841065  0.31523746  0.18976758  0.1604267
   0.24565086  0.401629   -0.00691485  0.11509646  0.08860719  0.09716512]
 [ 0.12469126  0.11720839  0.24673274  0.3808558   0.42380658  0.2621368
   0.32887724  0.1886324   0.12806496  0.47281274  0.27195993  0.34962964
   0.24486706  0.36335126 -0.09188677  0.12580143  0.21914107  0.13138317]
 [ 0.16931829  0.08405571  0.13139257  0.16333552  0.33958766  0.32529503
   0.26660106  0.11680223  0.2296581   0.4701092   0.2230686   0.3725225
   0.23047365  0.31476554  0.08270313  0.154771    0.13769959  0.1891731 ]
 [ 0.1846317   0.06302342  0.08073346  0.15223067  0.35505947  0.38914916
   0.23833379  0.08478674  0.20786288  0.4734787   0.22800201  0.3280982
   0.18471117  0.2572767   0.11909868  0.24955407  0.13159747  0.2797783 ]
 [ 0.2507956   0.00862758  0.03633691  0.13553326  0.31569448  0.41760764
   0.2678839   0.09138826  0.1763573   0.5089131   0.17960162  0.27677804
   0.14227498  0.23378101  0.10949497  0.24184388  0.12627165  0.3024571 ]
 [ 0.12922797 -0.05693577  0.02901192  0.07853359  0.29554048  0.39180577
   0.17896733  0.12038261  0.3897135   0.56368756  0.1906384   0.18238878
   0.19135481  0.15914914  0.12933834  0.27646235  0.22110885  0.25351232]
 [ 0.12636372 -0.00212509  0.0841718   0.09624996  0.31311196  0.3103539
   0.18983991  0.1100104   0.2619169   0.4316431   0.2024247   0.20129576
   0.17343009  0.17069802  0.10783818  0.20815408  0.20936173  0.23397937]
 [ 0.10983007 -0.01355921  0.09379672  0.13183509  0.31351525  0.26214504
   0.18180539  0.11299804  0.18988413  0.3457547   0.17699985  0.21104053
   0.14596361  0.17433181  0.11005978  0.18551457  0.2016221   0.23108107]
 [ 0.1236245   0.00789198  0.09939071  0.11609329  0.26733673  0.26609793
   0.16725302  0.10877615  0.1451251   0.2664126   0.16471568  0.199188
   0.10942814  0.17214596  0.13330112  0.1756801   0.1687816   0.24764147]
 [ 0.21199918 -0.01060315  0.07083106  0.11390285  0.23238501  0.2935898
   0.12949032  0.1545439   0.17624931  0.26860806  0.14612402  0.14183229
   0.10673767  0.11745659  0.14782844  0.1923987   0.17481723  0.26727587]
 [ 0.17890036  0.0101696   0.10042502  0.10315908  0.19509742  0.27651644
   0.13376099  0.1394335   0.13702236  0.1872485   0.15051602  0.16257548
   0.14005998  0.10900279  0.13990313  0.19810928  0.1779947   0.2820027 ]
 [ 0.19179046 -0.01456638  0.08676182  0.09633897  0.17623526  0.30208972
   0.13085437  0.17001247  0.1543244   0.1871883   0.14917612  0.15604001
   0.15176341  0.09494692  0.10756082  0.20251974  0.17445615  0.3188079 ]
 [ 0.17063826  0.02237317  0.08368277  0.08326905  0.16152133  0.2903669
   0.1626313   0.15619361  0.12501726  0.12652192  0.15626806  0.17722386
   0.17606568  0.09502742  0.08760568  0.21266958  0.17204034  0.3357885 ]
 [ 0.17859587  0.0213404   0.09878099  0.05881654  0.17770356  0.3151365
   0.11991301  0.19253251  0.16461103  0.167406    0.18657854  0.1589215
   0.23009121  0.09220351  0.1285808   0.2298041   0.16120937  0.32749468]
 [ 0.19103913 -0.00560956  0.11435831  0.05848746  0.20017132  0.32656735
   0.10176846  0.20514297  0.16586849  0.17564213  0.20156273  0.16410664
   0.2768981   0.10388201  0.11217664  0.22192052  0.14135611  0.35107   ]
 [ 0.16628167 -0.00698287  0.16517462  0.06864651  0.21791172  0.31534278
   0.08262844  0.2290282   0.20626767  0.18696947  0.22154877  0.1571981
   0.34719688  0.1259259   0.11823417  0.21939391  0.11648646  0.37124035]] (740, 48, 18)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.27 RMSE
Test Score: 0.19 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C4[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C4[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
km_4_W = KMeans(4).fit(X_W)
pd.Series.value_counts(km_4_W.labels_)
Out[ ]:
0    151
1    108
2     21
3     19
dtype: int64
In [ ]:
X_W['cluster'] = km_4_W.labels_
In [ ]:
##############WINTER
X_W_C1 = X_W[X_W.cluster == 0]
X_W_C2 = X_W[X_W.cluster == 1]
X_W_C3 = X_W[X_W.cluster == 2]
X_W_C4 = X_W[X_W.cluster == 3]
In [ ]:
X_W_C1 = X_W_C1.drop(columns = 'cluster')
X_W_C1 = X_W_C1.transpose()
X_W_C1.head()
In [ ]:
#scaler = MinMaxScaler(feature_range=(0, 1))
#Xtrain = scaler.fit(Xtrain.reshape(0, 1))
#X_W_C1 = scaler.fit_transform(X_W_C1)
In [ ]:
X_W_C1 = X_W_C1.values
cap = np.percentile(X_W_C1, 97)   
X_W_C1[X_W_C1 > cap] = cap
In [ ]:
training_size=int(X_W_C1.shape[0]*0.80)

test_size=(X_W_C1.shape[0])-training_size

train,test=X_W_C1[0:training_size],X_W_C1[training_size:(X_W_C1.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test, 48, 48, 48)

print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3388, 48, 151) (3388, 48, 151)
(740, 48, 151) (740, 48, 151)
In [ ]:
from keras.regularizers import l2

Model_1 = models.Sequential()

Model_1.add(layers.Dense(500, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2]), kernel_regularizer=l2(0.001)))
Model_1.add(Dropout(.2))
Model_1.add(BatchNormalization())
Model_1.add(layers.Dense(300, activation='relu'))
Model_1.add(Dropout(.2))
Model_1.add(BatchNormalization())
Model_1.add((Dense(trainX.shape[2])))
Model_1.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_1.summary()
Model: "sequential_26"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_65 (Dense)             (None, 48, 500)           76000     
_________________________________________________________________
dropout_46 (Dropout)         (None, 48, 500)           0         
_________________________________________________________________
batch_normalization_39 (Batc (None, 48, 500)           2000      
_________________________________________________________________
dense_66 (Dense)             (None, 48, 300)           150300    
_________________________________________________________________
dropout_47 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_40 (Batc (None, 48, 300)           1200      
_________________________________________________________________
dense_67 (Dense)             (None, 48, 151)           45451     
=================================================================
Total params: 274,951
Trainable params: 273,351
Non-trainable params: 1,600
_________________________________________________________________
In [ ]:
model_train = Model_1.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size = 64)
Epoch 1/30
48/48 [==============================] - 1s 22ms/step - loss: 0.7124 - mae: 0.5252 - val_loss: 0.1954 - val_mae: 0.1472
Epoch 2/30
48/48 [==============================] - 1s 16ms/step - loss: 0.2583 - mae: 0.2789 - val_loss: 0.1081 - val_mae: 0.1531
Epoch 3/30
48/48 [==============================] - 1s 16ms/step - loss: 0.1357 - mae: 0.2103 - val_loss: 0.0724 - val_mae: 0.1492
Epoch 4/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0814 - mae: 0.1727 - val_loss: 0.0590 - val_mae: 0.1490
Epoch 5/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0564 - mae: 0.1503 - val_loss: 0.0542 - val_mae: 0.1485
Epoch 6/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0440 - mae: 0.1358 - val_loss: 0.0532 - val_mae: 0.1492
Epoch 7/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0381 - mae: 0.1263 - val_loss: 0.0527 - val_mae: 0.1487
Epoch 8/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0350 - mae: 0.1202 - val_loss: 0.0522 - val_mae: 0.1484
Epoch 9/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0332 - mae: 0.1168 - val_loss: 0.0519 - val_mae: 0.1489
Epoch 10/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0320 - mae: 0.1147 - val_loss: 0.0521 - val_mae: 0.1497
Epoch 11/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0310 - mae: 0.1131 - val_loss: 0.0508 - val_mae: 0.1480
Epoch 12/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0303 - mae: 0.1120 - val_loss: 0.0504 - val_mae: 0.1471
Epoch 13/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0299 - mae: 0.1113 - val_loss: 0.0494 - val_mae: 0.1453
Epoch 14/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0291 - mae: 0.1099 - val_loss: 0.0472 - val_mae: 0.1397
Epoch 15/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0287 - mae: 0.1093 - val_loss: 0.0454 - val_mae: 0.1368
Epoch 16/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0284 - mae: 0.1090 - val_loss: 0.0441 - val_mae: 0.1319
Epoch 17/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0280 - mae: 0.1081 - val_loss: 0.0424 - val_mae: 0.1296
Epoch 18/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0278 - mae: 0.1078 - val_loss: 0.0408 - val_mae: 0.1254
Epoch 19/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0273 - mae: 0.1070 - val_loss: 0.0414 - val_mae: 0.1250
Epoch 20/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0272 - mae: 0.1069 - val_loss: 0.0404 - val_mae: 0.1247
Epoch 21/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0270 - mae: 0.1065 - val_loss: 0.0403 - val_mae: 0.1227
Epoch 22/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0266 - mae: 0.1057 - val_loss: 0.0394 - val_mae: 0.1226
Epoch 23/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0264 - mae: 0.1055 - val_loss: 0.0406 - val_mae: 0.1226
Epoch 24/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0262 - mae: 0.1050 - val_loss: 0.0403 - val_mae: 0.1231
Epoch 25/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0259 - mae: 0.1046 - val_loss: 0.0408 - val_mae: 0.1248
Epoch 26/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0259 - mae: 0.1046 - val_loss: 0.0402 - val_mae: 0.1231
Epoch 27/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0255 - mae: 0.1039 - val_loss: 0.0400 - val_mae: 0.1229
Epoch 28/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0254 - mae: 0.1036 - val_loss: 0.0404 - val_mae: 0.1238
Epoch 29/30
48/48 [==============================] - 1s 16ms/step - loss: 0.0253 - mae: 0.1034 - val_loss: 0.0406 - val_mae: 0.1238
Epoch 30/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0251 - mae: 0.1032 - val_loss: 0.0411 - val_mae: 0.1248
In [ ]:
Seq_train = Model_1.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_1.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 1s 5ms/step
[[ 0.27484572  0.22847596  0.08165474 ...  0.05096228  0.2041915
   0.14380695]
 [ 0.23690297  0.13722928  0.09582652 ...  0.08070424  0.20236987
   0.13134947]
 [ 0.23164043  0.10696403  0.07218933 ...  0.06389123  0.19698274
   0.11907854]
 ...
 [ 0.4790719   0.13782428  0.05073661 ...  0.11238191  0.21092981
   0.2516212 ]
 [ 0.25574255  0.06796339  0.05263993 ...  0.0680418   0.19061899
   0.20594832]
 [ 0.19628857  0.0975142   0.10720156 ... -0.00075963  0.19336271
   0.17014413]] (3388, 48, 151)
24/24 [==============================] - 0s 6ms/step
[[0.3124053  0.14211364 0.20956123 ... 0.17417188 0.25319627 0.2011966 ]
 [0.28502336 0.06181093 0.23583895 ... 0.17108765 0.26050225 0.17009452]
 [0.27856544 0.03611451 0.35440898 ... 0.2864716  0.27124074 0.18813622]
 ...
 [0.3339588  0.16520914 0.141316   ... 0.20339552 0.2602288  0.17363743]
 [0.29258224 0.08728077 0.19424194 ... 0.18235575 0.2545526  0.15641329]
 [0.20971344 0.1194092  0.16546547 ... 0.2054269  0.23201317 0.13841936]] (740, 48, 151)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], Seq_train[:,1,:]))
#print('Test Score: %.2f RMSE' % (trainScore))

#trainMAE = np.mean(mae(trainY[:,1,:], Seq_train[:,1,:]))
#print('Test Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.25 RMSE
Test Score: 0.15 RMSE
Test Score: 0.09 MAE
Test Score: 0.16 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(30))
#model.add(Dropout(.5))
#model.add(BatchNormalization())


model.add(Dense(trainX.shape[2]))
#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_13 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_27"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_13 (LSTM)               (None, 48, 200)           281600    
_________________________________________________________________
dense_68 (Dense)             (None, 48, 151)           30351     
=================================================================
Total params: 311,951
Trainable params: 311,951
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_W_C1 = model.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64, callbacks=[lr_decay])
Epoch 1/30
48/48 [==============================] - 6s 121ms/step - loss: 0.0577 - mae: 0.1616 - val_loss: 0.0444 - val_mae: 0.1396
Epoch 2/30
48/48 [==============================] - 6s 117ms/step - loss: 0.0420 - mae: 0.1371 - val_loss: 0.0419 - val_mae: 0.1356
Epoch 3/30
48/48 [==============================] - 6s 119ms/step - loss: 0.0388 - mae: 0.1307 - val_loss: 0.0409 - val_mae: 0.1354
Epoch 4/30
48/48 [==============================] - 6s 116ms/step - loss: 0.0372 - mae: 0.1275 - val_loss: 0.0402 - val_mae: 0.1341
Epoch 5/30
48/48 [==============================] - 6s 121ms/step - loss: 0.0360 - mae: 0.1252 - val_loss: 0.0404 - val_mae: 0.1355
Epoch 6/30
48/48 [==============================] - 6s 120ms/step - loss: 0.0351 - mae: 0.1236 - val_loss: 0.0410 - val_mae: 0.1377
Epoch 7/30
48/48 [==============================] - 6s 121ms/step - loss: 0.0345 - mae: 0.1224 - val_loss: 0.0408 - val_mae: 0.1364
Epoch 8/30
48/48 [==============================] - 6s 117ms/step - loss: 0.0340 - mae: 0.1214 - val_loss: 0.0410 - val_mae: 0.1364
Epoch 9/30
48/48 [==============================] - 6s 119ms/step - loss: 0.0336 - mae: 0.1206 - val_loss: 0.0411 - val_mae: 0.1363
Epoch 10/30
48/48 [==============================] - 6s 120ms/step - loss: 0.0332 - mae: 0.1200 - val_loss: 0.0415 - val_mae: 0.1375
Epoch 11/30
48/48 [==============================] - 6s 118ms/step - loss: 0.0330 - mae: 0.1196 - val_loss: 0.0417 - val_mae: 0.1379
Epoch 12/30
48/48 [==============================] - 6s 116ms/step - loss: 0.0328 - mae: 0.1192 - val_loss: 0.0418 - val_mae: 0.1379
Epoch 13/30
48/48 [==============================] - 6s 115ms/step - loss: 0.0326 - mae: 0.1189 - val_loss: 0.0420 - val_mae: 0.1384
Epoch 14/30
48/48 [==============================] - 6s 120ms/step - loss: 0.0325 - mae: 0.1187 - val_loss: 0.0421 - val_mae: 0.1387
Epoch 15/30
48/48 [==============================] - 6s 118ms/step - loss: 0.0324 - mae: 0.1185 - val_loss: 0.0421 - val_mae: 0.1384
Epoch 16/30
48/48 [==============================] - 6s 121ms/step - loss: 0.0323 - mae: 0.1183 - val_loss: 0.0423 - val_mae: 0.1389
Epoch 17/30
48/48 [==============================] - 6s 118ms/step - loss: 0.0323 - mae: 0.1182 - val_loss: 0.0423 - val_mae: 0.1389
Epoch 18/30
48/48 [==============================] - 5s 113ms/step - loss: 0.0322 - mae: 0.1181 - val_loss: 0.0424 - val_mae: 0.1391
Epoch 19/30
48/48 [==============================] - 6s 119ms/step - loss: 0.0322 - mae: 0.1180 - val_loss: 0.0424 - val_mae: 0.1391
Epoch 20/30
48/48 [==============================] - 6s 118ms/step - loss: 0.0321 - mae: 0.1180 - val_loss: 0.0424 - val_mae: 0.1390
Epoch 21/30
48/48 [==============================] - 6s 122ms/step - loss: 0.0321 - mae: 0.1179 - val_loss: 0.0424 - val_mae: 0.1391
Epoch 22/30
48/48 [==============================] - 6s 118ms/step - loss: 0.0321 - mae: 0.1179 - val_loss: 0.0424 - val_mae: 0.1390
Epoch 23/30
48/48 [==============================] - 6s 120ms/step - loss: 0.0320 - mae: 0.1178 - val_loss: 0.0425 - val_mae: 0.1391
Epoch 24/30
48/48 [==============================] - 6s 118ms/step - loss: 0.0320 - mae: 0.1178 - val_loss: 0.0425 - val_mae: 0.1391
Epoch 25/30
48/48 [==============================] - 6s 118ms/step - loss: 0.0320 - mae: 0.1178 - val_loss: 0.0425 - val_mae: 0.1391
Epoch 26/30
48/48 [==============================] - 6s 116ms/step - loss: 0.0320 - mae: 0.1178 - val_loss: 0.0425 - val_mae: 0.1392
Epoch 27/30
48/48 [==============================] - 6s 117ms/step - loss: 0.0320 - mae: 0.1178 - val_loss: 0.0425 - val_mae: 0.1393
Epoch 28/30
48/48 [==============================] - 6s 116ms/step - loss: 0.0320 - mae: 0.1177 - val_loss: 0.0425 - val_mae: 0.1393
Epoch 29/30
48/48 [==============================] - 6s 117ms/step - loss: 0.0320 - mae: 0.1177 - val_loss: 0.0425 - val_mae: 0.1393
Epoch 30/30
48/48 [==============================] - 6s 118ms/step - loss: 0.0320 - mae: 0.1177 - val_loss: 0.0425 - val_mae: 0.1393
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_W_C1.history['loss'], label='train')
plt.plot(history_W_C1.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C1 = model.predict(trainX, verbose = 1)
print(testingtrain_C1[0], testingtrain_C1.shape)

testingtest_C1 = model.predict(testX, verbose = 1)
print(testingtest_C1[0], testingtest_C1.shape)
106/106 [==============================] - 1s 14ms/step
[[0.31328112 0.17540853 0.13634469 ... 0.11214857 0.17701818 0.09642206]
 [0.36353254 0.17854227 0.15796165 ... 0.09807523 0.19472876 0.086991  ]
 [0.3739678  0.14391342 0.14678031 ... 0.10375595 0.2308886  0.06737533]
 ...
 [0.57648283 0.41739523 0.18706384 ... 0.10799417 0.23594983 0.2159638 ]
 [0.50334847 0.31553358 0.18421367 ... 0.099211   0.25434592 0.19314562]
 [0.43614364 0.2265208  0.18141897 ... 0.07786556 0.26462203 0.17931187]] (3388, 48, 151)
24/24 [==============================] - 0s 13ms/step
[[0.33908534 0.06837083 0.27693743 ... 0.15863009 0.21669066 0.1416415 ]
 [0.41956457 0.09144194 0.3505696  ... 0.19563273 0.27309862 0.17630067]
 [0.46950945 0.09865554 0.37485853 ... 0.21369992 0.28685725 0.21342647]
 ...
 [0.2975213  0.25938892 0.17968209 ... 0.26437983 0.28376114 0.1951591 ]
 [0.3488151  0.277744   0.1764158  ... 0.22225818 0.31094176 0.20893213]
 [0.39007035 0.2790546  0.19149098 ... 0.2195264  0.2855193  0.2248685 ]] (740, 48, 151)
In [ ]:
def rmse(actual, pred):
    return np.sqrt(((pred - actual) ** 2).mean())
def mae(actual, pred):
    return np.mean(np.abs(actual - pred))
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.19 RMSE
Test Score: 0.24 RMSE
Train Score: 0.13 MAE
Test Score: 0.17 MAE
In [ ]:
plt.imshow(testY[:47,47,:])
plt.show()
plt.imshow(testingtest_C1[:47,47,:])
plt.show()
In [ ]:
aa=[x for x in range(200)]
plt.figure(figsize=(20,5))
plt.plot(aa, trainY[:200,47,13], marker='.', label="actual")
plt.plot(aa, testingtrain_C1[:200,47,13], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
X_W_C2 = X_W_C2.drop(columns = 'cluster')
X_W_C2 = X_W_C2.transpose()
X_W_C2.head()
In [ ]:
X_W_C2 = X_W_C2.values
cap = np.percentile(X_W_C2, 97)   
X_W_C2[X_W_C2 > cap] = cap
In [ ]:
training_size=int(X_W_C2.shape[0]*0.80)

test_size=(X_W_C2.shape[0])-training_size

train,test=X_W_C2[0:training_size],X_W_C2[training_size:(X_W_C2.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)
In [ ]:
print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3388, 48, 108) (3388, 48, 108) 
 (740, 48, 108) (740, 48, 108)
In [ ]:
Model_2 = models.Sequential()

Model_2.add(layers.Dense(400, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_2.add(Dropout(.2))
Model_2.add(BatchNormalization())

#Model_2.add(layers.Dense(400, activation='relu'))
#Model_2.add(Dropout(.2))

Model_2.add(layers.Dense(200, activation='relu'))
Model_2.add(Dropout(.2))
Model_2.add(BatchNormalization())

Model_2.add((Dense(trainX.shape[2])))
Model_2.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_2.summary()
Model: "sequential_28"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_69 (Dense)             (None, 48, 400)           43600     
_________________________________________________________________
dropout_48 (Dropout)         (None, 48, 400)           0         
_________________________________________________________________
batch_normalization_41 (Batc (None, 48, 400)           1600      
_________________________________________________________________
dense_70 (Dense)             (None, 48, 200)           80200     
_________________________________________________________________
dropout_49 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_42 (Batc (None, 48, 200)           800       
_________________________________________________________________
dense_71 (Dense)             (None, 48, 108)           21708     
=================================================================
Total params: 147,908
Trainable params: 146,708
Non-trainable params: 1,200
_________________________________________________________________
In [ ]:
model_train = Model_2.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
48/48 [==============================] - 1s 20ms/step - loss: 0.7481 - mae: 0.6521 - val_loss: 0.1487 - val_mae: 0.2591
Epoch 2/30
48/48 [==============================] - 1s 15ms/step - loss: 0.3018 - mae: 0.3990 - val_loss: 0.1225 - val_mae: 0.2672
Epoch 3/30
48/48 [==============================] - 1s 15ms/step - loss: 0.1985 - mae: 0.3196 - val_loss: 0.1182 - val_mae: 0.2515
Epoch 4/30
48/48 [==============================] - 1s 16ms/step - loss: 0.1573 - mae: 0.2813 - val_loss: 0.1178 - val_mae: 0.2457
Epoch 5/30
48/48 [==============================] - 1s 16ms/step - loss: 0.1339 - mae: 0.2575 - val_loss: 0.1189 - val_mae: 0.2432
Epoch 6/30
48/48 [==============================] - 1s 16ms/step - loss: 0.1197 - mae: 0.2413 - val_loss: 0.1187 - val_mae: 0.2411
Epoch 7/30
48/48 [==============================] - 1s 15ms/step - loss: 0.1106 - mae: 0.2305 - val_loss: 0.1180 - val_mae: 0.2374
Epoch 8/30
48/48 [==============================] - 1s 16ms/step - loss: 0.1047 - mae: 0.2233 - val_loss: 0.1166 - val_mae: 0.2353
Epoch 9/30
48/48 [==============================] - 1s 15ms/step - loss: 0.1002 - mae: 0.2180 - val_loss: 0.1159 - val_mae: 0.2356
Epoch 10/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0967 - mae: 0.2142 - val_loss: 0.1153 - val_mae: 0.2358
Epoch 11/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0936 - mae: 0.2104 - val_loss: 0.1147 - val_mae: 0.2339
Epoch 12/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0909 - mae: 0.2075 - val_loss: 0.1139 - val_mae: 0.2325
Epoch 13/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0884 - mae: 0.2047 - val_loss: 0.1121 - val_mae: 0.2309
Epoch 14/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0862 - mae: 0.2021 - val_loss: 0.1119 - val_mae: 0.2302
Epoch 15/30
48/48 [==============================] - 1s 15ms/step - loss: 0.0842 - mae: 0.1999 - val_loss: 0.1119 - val_mae: 0.2290
Epoch 16/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0824 - mae: 0.1980 - val_loss: 0.1137 - val_mae: 0.2313
Epoch 17/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0807 - mae: 0.1962 - val_loss: 0.1134 - val_mae: 0.2311
Epoch 18/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0791 - mae: 0.1943 - val_loss: 0.1138 - val_mae: 0.2315
Epoch 19/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0777 - mae: 0.1929 - val_loss: 0.1132 - val_mae: 0.2317
Epoch 20/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0763 - mae: 0.1913 - val_loss: 0.1137 - val_mae: 0.2320
Epoch 21/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0751 - mae: 0.1901 - val_loss: 0.1144 - val_mae: 0.2314
Epoch 22/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0740 - mae: 0.1887 - val_loss: 0.1147 - val_mae: 0.2318
Epoch 23/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0730 - mae: 0.1877 - val_loss: 0.1153 - val_mae: 0.2315
Epoch 24/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0720 - mae: 0.1865 - val_loss: 0.1156 - val_mae: 0.2343
Epoch 25/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0711 - mae: 0.1855 - val_loss: 0.1165 - val_mae: 0.2341
Epoch 26/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0703 - mae: 0.1847 - val_loss: 0.1167 - val_mae: 0.2354
Epoch 27/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0695 - mae: 0.1837 - val_loss: 0.1174 - val_mae: 0.2360
Epoch 28/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0688 - mae: 0.1829 - val_loss: 0.1165 - val_mae: 0.2351
Epoch 29/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0682 - mae: 0.1822 - val_loss: 0.1177 - val_mae: 0.2376
Epoch 30/30
48/48 [==============================] - 1s 14ms/step - loss: 0.0676 - mae: 0.1815 - val_loss: 0.1177 - val_mae: 0.2352
In [ ]:
Seq_train = Model_2.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_2.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 1s 5ms/step
[[0.26835126 0.35883808 0.63749886 ... 0.36737502 0.20905267 0.334898  ]
 [0.2110559  0.29593807 0.69989944 ... 0.47861058 0.09627174 0.32247335]
 [0.25626    0.2986223  0.62375706 ... 0.25398818 0.11156334 0.3275509 ]
 ...
 [0.5562374  0.83305854 0.76094306 ... 1.1207201  0.6304174  0.65161526]
 [0.60285646 1.0402555  0.63655806 ... 1.205954   0.13168782 1.4615449 ]
 [0.3799952  0.6847774  0.5369139  ... 1.0898242  0.23633128 1.2472618 ]] (3388, 48, 108)
24/24 [==============================] - 0s 5ms/step
[[0.40971696 0.17302932 0.53303444 ... 0.42294765 0.5300157  0.6311598 ]
 [0.5073595  0.09928985 0.6038091  ... 0.39624164 0.5024275  0.5059006 ]
 [0.40480515 0.31711102 0.6618516  ... 0.5985732  0.35992607 0.5418183 ]
 ...
 [0.5086468  0.19526975 0.6067925  ... 0.27813378 0.5025579  0.37839222]
 [0.53388256 0.30258682 0.53097165 ... 0.2649055  0.5176017  0.47496915]
 [0.5578969  0.17623377 0.54340696 ... 0.22174186 0.5010949  0.5443976 ]] (740, 48, 108)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.39 RMSE
Test Score: 0.25 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(400))
model.add(Dropout(.5))

model.add(Dense(trainX.shape[2]))

model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
#optimizers.Adam(lr=0.001)
model.summary()
WARNING:tensorflow:Layer lstm_14 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_29"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_14 (LSTM)               (None, 48, 200)           247200    
_________________________________________________________________
dropout_50 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
dense_72 (Dense)             (None, 48, 108)           21708     
=================================================================
Total params: 268,908
Trainable params: 268,908
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C2 = model.fit(trainX,trainY, epochs=30, validation_split = 0.05, batch_size=64, callbacks=[lr_decay])
Epoch 1/30
51/51 [==============================] - 6s 116ms/step - loss: 0.1102 - mae: 0.2292 - val_loss: 0.1110 - val_mae: 0.2325
Epoch 2/30
51/51 [==============================] - 6s 119ms/step - loss: 0.1049 - mae: 0.2228 - val_loss: 0.1079 - val_mae: 0.2293
Epoch 3/30
51/51 [==============================] - 6s 121ms/step - loss: 0.1017 - mae: 0.2188 - val_loss: 0.1073 - val_mae: 0.2277
Epoch 4/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0995 - mae: 0.2161 - val_loss: 0.1068 - val_mae: 0.2259
Epoch 5/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0980 - mae: 0.2143 - val_loss: 0.1064 - val_mae: 0.2252
Epoch 6/30
51/51 [==============================] - 6s 121ms/step - loss: 0.0969 - mae: 0.2131 - val_loss: 0.1060 - val_mae: 0.2254
Epoch 7/30
51/51 [==============================] - 6s 115ms/step - loss: 0.0961 - mae: 0.2122 - val_loss: 0.1058 - val_mae: 0.2249
Epoch 8/30
51/51 [==============================] - 6s 111ms/step - loss: 0.0956 - mae: 0.2115 - val_loss: 0.1058 - val_mae: 0.2243
Epoch 9/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0951 - mae: 0.2109 - val_loss: 0.1059 - val_mae: 0.2251
Epoch 10/30
51/51 [==============================] - 6s 115ms/step - loss: 0.0948 - mae: 0.2106 - val_loss: 0.1059 - val_mae: 0.2251
Epoch 11/30
51/51 [==============================] - 6s 117ms/step - loss: 0.0945 - mae: 0.2102 - val_loss: 0.1058 - val_mae: 0.2251
Epoch 12/30
51/51 [==============================] - 6s 118ms/step - loss: 0.0943 - mae: 0.2100 - val_loss: 0.1058 - val_mae: 0.2244
Epoch 13/30
51/51 [==============================] - 6s 118ms/step - loss: 0.0941 - mae: 0.2098 - val_loss: 0.1058 - val_mae: 0.2246
Epoch 14/30
51/51 [==============================] - 6s 119ms/step - loss: 0.0940 - mae: 0.2096 - val_loss: 0.1058 - val_mae: 0.2248
Epoch 15/30
51/51 [==============================] - 6s 120ms/step - loss: 0.0939 - mae: 0.2096 - val_loss: 0.1056 - val_mae: 0.2245
Epoch 16/30
51/51 [==============================] - 6s 115ms/step - loss: 0.0938 - mae: 0.2094 - val_loss: 0.1056 - val_mae: 0.2243
Epoch 17/30
51/51 [==============================] - 6s 125ms/step - loss: 0.0937 - mae: 0.2093 - val_loss: 0.1056 - val_mae: 0.2244
Epoch 18/30
51/51 [==============================] - 6s 114ms/step - loss: 0.0937 - mae: 0.2092 - val_loss: 0.1058 - val_mae: 0.2245
Epoch 19/30
51/51 [==============================] - 6s 118ms/step - loss: 0.0937 - mae: 0.2092 - val_loss: 0.1057 - val_mae: 0.2245
Epoch 20/30
51/51 [==============================] - 6s 117ms/step - loss: 0.0936 - mae: 0.2093 - val_loss: 0.1058 - val_mae: 0.2246
Epoch 21/30
51/51 [==============================] - 6s 119ms/step - loss: 0.0936 - mae: 0.2092 - val_loss: 0.1057 - val_mae: 0.2245
Epoch 22/30
51/51 [==============================] - 6s 114ms/step - loss: 0.0936 - mae: 0.2092 - val_loss: 0.1057 - val_mae: 0.2243
Epoch 23/30
51/51 [==============================] - 6s 118ms/step - loss: 0.0935 - mae: 0.2091 - val_loss: 0.1057 - val_mae: 0.2245
Epoch 24/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0936 - mae: 0.2092 - val_loss: 0.1057 - val_mae: 0.2245
Epoch 25/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0935 - mae: 0.2091 - val_loss: 0.1058 - val_mae: 0.2246
Epoch 26/30
51/51 [==============================] - 6s 116ms/step - loss: 0.0936 - mae: 0.2091 - val_loss: 0.1058 - val_mae: 0.2246
Epoch 27/30
51/51 [==============================] - 6s 117ms/step - loss: 0.0935 - mae: 0.2091 - val_loss: 0.1058 - val_mae: 0.2246
Epoch 28/30
51/51 [==============================] - 6s 120ms/step - loss: 0.0936 - mae: 0.2091 - val_loss: 0.1057 - val_mae: 0.2246
Epoch 29/30
51/51 [==============================] - 6s 119ms/step - loss: 0.0935 - mae: 0.2091 - val_loss: 0.1057 - val_mae: 0.2245
Epoch 30/30
51/51 [==============================] - 6s 118ms/step - loss: 0.0936 - mae: 0.2091 - val_loss: 0.1057 - val_mae: 0.2245
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C2.history['loss'], label='train')
plt.plot(history_C2.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C2 = model.predict(trainX, verbose = 1)
print(testingtrain_C2[0], testingtrain_C2.shape)

testingtest_C2 = model.predict(testX, verbose = 1)
print(testingtest_C2[0], testingtest_C2.shape)
106/106 [==============================] - 1s 14ms/step
[[0.30686814 0.25505736 0.54098696 ... 0.44411176 0.24475904 0.37100273]
 [0.25945133 0.2548089  0.5875555  ... 0.46445656 0.1639393  0.355589  ]
 [0.2162579  0.2335962  0.56580794 ... 0.36382228 0.11775543 0.28003448]
 ...
 [0.5457554  0.7074632  0.8908492  ... 1.0612653  0.4026393  0.8056234 ]
 [0.49329683 0.64040804 0.8593823  ... 1.0388556  0.23603977 0.9509624 ]
 [0.36571693 0.534806   0.78521186 ... 0.9626131  0.13213865 0.74820083]] (3388, 48, 108)
24/24 [==============================] - 0s 14ms/step
[[0.41607383 0.32477972 0.5161604  ... 0.31886455 0.4688798  0.34895664]
 [0.46109742 0.39223364 0.5496266  ... 0.3399914  0.54664767 0.40353847]
 [0.56141853 0.49700466 0.6301761  ... 0.41499358 0.70194733 0.4561572 ]
 ...
 [0.36714727 0.2630195  0.34426126 ... 0.29753202 0.51423335 0.37219012]
 [0.39578408 0.33393264 0.3506294  ... 0.31556022 0.5444559  0.40947753]
 [0.40075573 0.35469747 0.33578318 ... 0.29220438 0.5464924  0.4030459 ]] (740, 48, 108)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

testScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C2[:,1,:]))
print('Train Score: %.2f RMSE' % (testScore))


trainMAE = np.mean(mae(trainY, testingtrain_C2))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY, testingtest_C2))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.38 RMSE
Train Score: 0.31 RMSE
Train Score: 0.20 MAE
Test Score: 0.26 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C2[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C2[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
########cluster 3
X_W_C3 = X_W_C3.drop(columns = 'cluster')
X_W_C3 = X_W_C3.transpose()
X_W_C3.head()
Out[ ]:
26 56 58 79 82 104 109 141 148 157 175 185 191 192 242 248 251 254 255 267 294
Datetime
2012-07-01 00:00:00 0.863 0.139 0.928 0.094 0.137 2.844 0.806 0.245 0.550 0.474 0.111 0.486 0.714 1.331 1.207 0.218 1.160 0.094 1.238 0.761 0.134
2012-07-01 00:30:00 0.813 0.125 0.658 0.087 0.133 2.879 0.775 0.302 0.421 0.454 0.628 0.133 0.700 0.976 1.304 0.232 1.143 0.135 0.930 1.390 0.174
2012-07-01 01:00:00 0.863 0.122 0.223 0.581 0.114 1.264 0.769 0.252 0.423 0.255 1.319 0.131 0.244 1.245 1.319 0.202 2.913 0.092 1.435 1.450 0.165
2012-07-01 01:30:00 0.838 0.133 0.120 0.338 0.159 0.452 0.519 0.255 0.465 0.249 0.109 0.126 0.144 1.971 1.203 0.231 2.924 0.100 0.928 1.841 0.104
2012-07-01 02:00:00 0.838 0.124 0.161 0.269 0.143 0.472 0.525 0.634 0.485 0.273 0.101 0.126 0.170 1.881 1.283 0.221 2.942 0.125 0.796 1.074 0.130
In [ ]:
X_W_C3 = X_W_C3.values
cap = np.percentile(X_W_C2, 97)   
X_W_C2[X_W_C2 > cap] = cap
In [ ]:
training_size=int(X_W_C3.shape[0]*0.80)

test_size=(X_W_C3.shape[0])-training_size

train,test=X_W_C3[0:training_size],X_W_C3[training_size:(X_W_C3.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)


print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3388, 48, 21) (3388, 48, 21) 
 (740, 48, 21) (740, 48, 21)
In [ ]:
                             ###Building a sequential network:
Model_3 = models.Sequential()
Model_3.add(layers.Dense(100, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_3.add(Dropout(.2))

Model_3.add(layers.Dense(50, activation='relu'))
Model_3.add(Dropout(.2))

Model_3.add((Dense(trainX.shape[2])))
Model_3.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_3.summary()
#optimizers.Adam(lr=0.001)
Model: "sequential_31"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_76 (Dense)             (None, 48, 100)           2200      
_________________________________________________________________
dropout_53 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
dense_77 (Dense)             (None, 48, 50)            5050      
_________________________________________________________________
dropout_54 (Dropout)         (None, 48, 50)            0         
_________________________________________________________________
dense_78 (Dense)             (None, 48, 21)            1071      
=================================================================
Total params: 8,321
Trainable params: 8,321
Non-trainable params: 0
_________________________________________________________________
In [ ]:
model_train = Model_3.fit(trainX,trainY, epochs=30, validation_split = 0.05, batch_size=32)
Epoch 1/30
101/101 [==============================] - 1s 7ms/step - loss: 0.4545 - mae: 0.4793 - val_loss: 0.2645 - val_mae: 0.3678
Epoch 2/30
101/101 [==============================] - 1s 5ms/step - loss: 0.3053 - mae: 0.3961 - val_loss: 0.2523 - val_mae: 0.3580
Epoch 3/30
101/101 [==============================] - 0s 5ms/step - loss: 0.2798 - mae: 0.3779 - val_loss: 0.2459 - val_mae: 0.3563
Epoch 4/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2666 - mae: 0.3676 - val_loss: 0.2472 - val_mae: 0.3559
Epoch 5/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2582 - mae: 0.3609 - val_loss: 0.2435 - val_mae: 0.3561
Epoch 6/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2519 - mae: 0.3557 - val_loss: 0.2416 - val_mae: 0.3565
Epoch 7/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2469 - mae: 0.3515 - val_loss: 0.2425 - val_mae: 0.3527
Epoch 8/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2429 - mae: 0.3483 - val_loss: 0.2437 - val_mae: 0.3580
Epoch 9/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2395 - mae: 0.3452 - val_loss: 0.2432 - val_mae: 0.3553
Epoch 10/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2362 - mae: 0.3426 - val_loss: 0.2438 - val_mae: 0.3554
Epoch 11/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2333 - mae: 0.3401 - val_loss: 0.2437 - val_mae: 0.3558
Epoch 12/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2308 - mae: 0.3380 - val_loss: 0.2457 - val_mae: 0.3569
Epoch 13/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2286 - mae: 0.3360 - val_loss: 0.2442 - val_mae: 0.3544
Epoch 14/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2266 - mae: 0.3345 - val_loss: 0.2436 - val_mae: 0.3534
Epoch 15/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2240 - mae: 0.3326 - val_loss: 0.2435 - val_mae: 0.3514
Epoch 16/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2224 - mae: 0.3309 - val_loss: 0.2436 - val_mae: 0.3518
Epoch 17/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2205 - mae: 0.3296 - val_loss: 0.2438 - val_mae: 0.3506
Epoch 18/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2191 - mae: 0.3284 - val_loss: 0.2431 - val_mae: 0.3502
Epoch 19/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2172 - mae: 0.3272 - val_loss: 0.2428 - val_mae: 0.3507
Epoch 20/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2161 - mae: 0.3263 - val_loss: 0.2420 - val_mae: 0.3515
Epoch 21/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2146 - mae: 0.3251 - val_loss: 0.2429 - val_mae: 0.3490
Epoch 22/30
101/101 [==============================] - 0s 5ms/step - loss: 0.2133 - mae: 0.3243 - val_loss: 0.2425 - val_mae: 0.3504
Epoch 23/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2121 - mae: 0.3232 - val_loss: 0.2399 - val_mae: 0.3467
Epoch 24/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2110 - mae: 0.3225 - val_loss: 0.2420 - val_mae: 0.3484
Epoch 25/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2099 - mae: 0.3217 - val_loss: 0.2408 - val_mae: 0.3447
Epoch 26/30
101/101 [==============================] - 0s 5ms/step - loss: 0.2091 - mae: 0.3212 - val_loss: 0.2402 - val_mae: 0.3475
Epoch 27/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2080 - mae: 0.3203 - val_loss: 0.2398 - val_mae: 0.3466
Epoch 28/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2073 - mae: 0.3198 - val_loss: 0.2399 - val_mae: 0.3470
Epoch 29/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2063 - mae: 0.3190 - val_loss: 0.2406 - val_mae: 0.3473
Epoch 30/30
101/101 [==============================] - 1s 5ms/step - loss: 0.2051 - mae: 0.3181 - val_loss: 0.2417 - val_mae: 0.3473
In [ ]:
Seq_train = Model_3.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_3.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 0s 3ms/step
[[0.59158635 0.33104247 0.40218723 ... 0.89711875 0.5384586  0.24696937]
 [0.7800741  0.2736205  0.34548384 ... 0.90921533 0.61527455 0.30762613]
 [1.1086199  0.13000439 0.3830164  ... 0.9202435  0.69989085 0.21080528]
 ...
 [0.8018249  0.56779134 1.5241446  ... 1.2406257  1.0782928  0.36452034]
 [0.75198364 0.48664716 0.6568923  ... 1.1018114  1.0400889  0.3623207 ]
 [0.8822031  0.33894965 0.28666914 ... 1.1867329  0.94048387 0.23593362]] (3388, 48, 21)
24/24 [==============================] - 0s 3ms/step
[[0.50853485 0.43026814 1.3146564  ... 0.9126197  0.6941333  0.43202657]
 [0.3601684  0.36299217 1.6116691  ... 0.63110244 0.3739007  0.23550749]
 [0.6004798  0.2988577  1.9783409  ... 0.7714912  0.46910727 0.40529484]
 ...
 [0.74238026 0.5834657  0.2869327  ... 0.82265973 0.40030208 0.4961266 ]
 [0.6141894  0.960902   0.3937481  ... 0.8775495  0.41770574 0.48504305]
 [0.49610838 0.46772063 1.2451832  ... 0.6434792  0.25900188 0.25039005]] (740, 48, 21)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.61 RMSE
Test Score: 0.44 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(60, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(100))
#model.add(Dropout(.2))

model.add(Dense(trainX.shape[2]))
#model.add(BatchNormalization())
#model.add(Dropout(.01))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_15 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_32"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_15 (LSTM)               (None, 48, 60)            19680     
_________________________________________________________________
dense_79 (Dense)             (None, 48, 21)            1281      
=================================================================
Total params: 20,961
Trainable params: 20,961
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C3 = model.fit(trainX,trainY, epochs=30, batch_size=32, validation_split = 0.05, callbacks=[lr_decay])
101/101 [==============================] - 11s 109ms/step - loss: 0.1801 - mae: 0.2961 - val_loss: 0.2450 - val_mae: 0.3585
Epoch 7/30
101/101 [==============================] - 11s 111ms/step - loss: 0.1781 - mae: 0.2945 - val_loss: 0.2465 - val_mae: 0.3599
Epoch 8/30
101/101 [==============================] - 11s 109ms/step - loss: 0.1765 - mae: 0.2933 - val_loss: 0.2466 - val_mae: 0.3587
Epoch 9/30
101/101 [==============================] - 11s 110ms/step - loss: 0.1754 - mae: 0.2922 - val_loss: 0.2477 - val_mae: 0.3588
Epoch 10/30
101/101 [==============================] - 11s 113ms/step - loss: 0.1744 - mae: 0.2913 - val_loss: 0.2495 - val_mae: 0.3634
Epoch 11/30
101/101 [==============================] - 11s 111ms/step - loss: 0.1736 - mae: 0.2908 - val_loss: 0.2477 - val_mae: 0.3605
Epoch 12/30
101/101 [==============================] - 11s 107ms/step - loss: 0.1730 - mae: 0.2902 - val_loss: 0.2486 - val_mae: 0.3626
Epoch 13/30
101/101 [==============================] - 11s 111ms/step - loss: 0.1725 - mae: 0.2898 - val_loss: 0.2484 - val_mae: 0.3608
Epoch 14/30
101/101 [==============================] - 11s 112ms/step - loss: 0.1721 - mae: 0.2895 - val_loss: 0.2488 - val_mae: 0.3612
Epoch 15/30
101/101 [==============================] - 11s 113ms/step - loss: 0.1718 - mae: 0.2892 - val_loss: 0.2486 - val_mae: 0.3608
Epoch 16/30
101/101 [==============================] - 11s 112ms/step - loss: 0.1715 - mae: 0.2890 - val_loss: 0.2487 - val_mae: 0.3616
Epoch 17/30
101/101 [==============================] - 11s 110ms/step - loss: 0.1713 - mae: 0.2889 - val_loss: 0.2489 - val_mae: 0.3611
Epoch 18/30
101/101 [==============================] - 11s 109ms/step - loss: 0.1712 - mae: 0.2886 - val_loss: 0.2488 - val_mae: 0.3615
Epoch 19/30
101/101 [==============================] - 11s 112ms/step - loss: 0.1710 - mae: 0.2887 - val_loss: 0.2485 - val_mae: 0.3607
Epoch 20/30
101/101 [==============================] - 11s 111ms/step - loss: 0.1709 - mae: 0.2885 - val_loss: 0.2490 - val_mae: 0.3615
Epoch 21/30
101/101 [==============================] - 11s 112ms/step - loss: 0.1709 - mae: 0.2885 - val_loss: 0.2487 - val_mae: 0.3609
Epoch 22/30
101/101 [==============================] - 11s 108ms/step - loss: 0.1708 - mae: 0.2884 - val_loss: 0.2488 - val_mae: 0.3611
Epoch 23/30
101/101 [==============================] - 12s 114ms/step - loss: 0.1707 - mae: 0.2884 - val_loss: 0.2486 - val_mae: 0.3609
Epoch 24/30
101/101 [==============================] - 11s 111ms/step - loss: 0.1707 - mae: 0.2883 - val_loss: 0.2487 - val_mae: 0.3611
Epoch 25/30
101/101 [==============================] - 11s 110ms/step - loss: 0.1707 - mae: 0.2882 - val_loss: 0.2489 - val_mae: 0.3612
Epoch 26/30
101/101 [==============================] - 11s 113ms/step - loss: 0.1706 - mae: 0.2883 - val_loss: 0.2487 - val_mae: 0.3610
Epoch 27/30
101/101 [==============================] - 11s 113ms/step - loss: 0.1706 - mae: 0.2882 - val_loss: 0.2488 - val_mae: 0.3612
Epoch 28/30
101/101 [==============================] - 11s 111ms/step - loss: 0.1706 - mae: 0.2883 - val_loss: 0.2487 - val_mae: 0.3610
Epoch 29/30
101/101 [==============================] - 11s 111ms/step - loss: 0.1706 - mae: 0.2882 - val_loss: 0.2489 - val_mae: 0.3613
Epoch 30/30
101/101 [==============================] - 11s 112ms/step - loss: 0.1706 - mae: 0.2882 - val_loss: 0.2488 - val_mae: 0.3610
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C3.history['loss'], label='train')
plt.plot(history_C3.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C3 = model.predict(trainX, verbose = 1)
print(testingtrain_C3[0], testingtrain_C3.shape)

testingtest_C3 = model.predict(testX, verbose = 1)
print(testingtest_C3[0], testingtest_C3.shape)
106/106 [==============================] - 1s 11ms/step
[[0.7302824  0.4968315  0.755315   ... 1.0567318  0.66575    0.35550448]
 [0.8484683  0.46364552 0.7511694  ... 1.1150124  0.8074895  0.34558132]
 [0.90193015 0.3025523  0.5831425  ... 1.1849743  0.7387864  0.28042403]
 ...
 [1.124224   0.5054705  1.4895638  ... 1.1956697  1.3595233  0.3145533 ]
 [0.99690145 0.4529236  0.5466541  ... 1.2644231  1.8515813  0.15654065]
 [0.9723589  0.2709913  0.3646723  ... 1.2303087  1.4909432  0.17304924]] (3388, 48, 21)
24/24 [==============================] - 0s 10ms/step
[[0.54883814 0.5283423  1.1063478  ... 0.90234977 0.6407313  0.61144596]
 [0.3335674  0.45864937 1.3922515  ... 0.7470738  0.52467734 0.6573902 ]
 [0.30844152 0.27292225 1.6249537  ... 0.6222907  0.49204278 0.5528554 ]
 ...
 [0.81444806 0.6323308  0.44589275 ... 0.80101335 0.3460816  0.7907529 ]
 [0.81586736 0.811895   0.4695527  ... 0.8675693  0.37190455 0.7916719 ]
 [0.8438496  0.69108367 1.1838888  ... 0.84843105 0.40798157 0.7374955 ]] (740, 48, 21)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C3[:,1,:]))
print('Test Score: %.2f RMSE' % (trainScore))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C3[:,1,:]))
print('Test Score: %.2f MAE' % (trainMAE))
Test Score: 0.58 RMSE
Test Score: 0.49 RMSE
Test Score: 0.44 MAE
Test Score: 0.35 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C3[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C3[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
########cluster 4
X_W_C4 = X_W_C4.drop(columns = 'cluster')
X_W_C4 = X_W_C4.transpose()
X_W_C4.head()
Out[ ]:
6 8 62 94 96 114 131 134 147 149 152 188 200 228 256 276 279 293 299
Datetime
2012-07-01 00:00:00 0.260 0.208 0.168 0.511 0.151 0.085 2.196 2.005 0.288 0.075 0.288 0.072 1.283 0.186 0.856 0.081 0.094 0.183 0.541
2012-07-01 00:30:00 0.253 0.151 0.175 0.504 0.084 0.093 1.362 1.423 0.288 0.075 0.275 0.067 0.934 0.199 0.546 0.125 0.063 0.177 0.100
2012-07-01 01:00:00 0.180 0.092 0.188 0.473 0.138 0.075 1.405 2.061 0.563 0.475 0.263 0.066 1.022 0.207 0.498 0.154 0.106 0.193 0.090
2012-07-01 01:30:00 0.220 0.152 0.344 0.513 0.088 0.102 1.386 1.577 0.363 0.125 0.288 0.063 1.022 0.212 0.138 0.188 0.081 0.185 0.094
2012-07-01 02:00:00 0.171 0.083 0.172 0.491 0.118 0.075 1.375 1.410 0.350 0.125 0.263 0.065 0.936 0.158 0.255 0.135 0.106 0.176 0.046
In [ ]:
X_W_C4 = X_W_C4.values
cap = np.percentile(X_W_C4, 97)   
X_W_C4[X_W_C4 > cap] = cap
In [ ]:
training_size=int(X_W_C4.shape[0]*0.80)

test_size=(X_W_C4.shape[0])-training_size

train,test=X_W_C4[0:training_size],X_W_C4[training_size:(X_W_C4.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3388, 48, 19) (3388, 48, 19) 
 (740, 48, 19) (740, 48, 19)
In [ ]:
                             ###Building a sequential network:
Model_4 = models.Sequential()
Model_4.add(layers.Dense(200, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_4.add(Dropout(0.2))
Model_4.add(BatchNormalization())

Model_4.add(layers.Dense(100, activation='relu'))
Model_4.add(Dropout(0.2))
Model_4.add(BatchNormalization())

Model_4.add((Dense(trainX.shape[2])))
Model_4.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_4.summary()
Model: "sequential_33"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_80 (Dense)             (None, 48, 200)           4000      
_________________________________________________________________
dropout_55 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_43 (Batc (None, 48, 200)           800       
_________________________________________________________________
dense_81 (Dense)             (None, 48, 100)           20100     
_________________________________________________________________
dropout_56 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
batch_normalization_44 (Batc (None, 48, 100)           400       
_________________________________________________________________
dense_82 (Dense)             (None, 48, 19)            1919      
=================================================================
Total params: 27,219
Trainable params: 26,619
Non-trainable params: 600
_________________________________________________________________
In [ ]:
model_train = Model_4.fit(trainX,trainY, epochs=30, validation_split = 0.05, batch_size=32)
Epoch 1/30
101/101 [==============================] - 1s 9ms/step - loss: 1.2928 - mae: 0.8627 - val_loss: 0.3262 - val_mae: 0.3817
Epoch 2/30
101/101 [==============================] - 1s 7ms/step - loss: 0.5106 - mae: 0.5167 - val_loss: 0.2893 - val_mae: 0.3983
Epoch 3/30
101/101 [==============================] - 1s 7ms/step - loss: 0.3912 - mae: 0.4469 - val_loss: 0.2700 - val_mae: 0.3740
Epoch 4/30
101/101 [==============================] - 1s 7ms/step - loss: 0.3464 - mae: 0.4175 - val_loss: 0.2644 - val_mae: 0.3673
Epoch 5/30
101/101 [==============================] - 1s 7ms/step - loss: 0.3226 - mae: 0.4018 - val_loss: 0.2602 - val_mae: 0.3594
Epoch 6/30
101/101 [==============================] - 1s 7ms/step - loss: 0.3089 - mae: 0.3926 - val_loss: 0.2574 - val_mae: 0.3533
Epoch 7/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2987 - mae: 0.3860 - val_loss: 0.2616 - val_mae: 0.3537
Epoch 8/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2907 - mae: 0.3804 - val_loss: 0.2573 - val_mae: 0.3430
Epoch 9/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2833 - mae: 0.3758 - val_loss: 0.2610 - val_mae: 0.3457
Epoch 10/30
101/101 [==============================] - 1s 8ms/step - loss: 0.2775 - mae: 0.3719 - val_loss: 0.2605 - val_mae: 0.3421
Epoch 11/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2715 - mae: 0.3677 - val_loss: 0.2624 - val_mae: 0.3431
Epoch 12/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2664 - mae: 0.3642 - val_loss: 0.2589 - val_mae: 0.3427
Epoch 13/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2608 - mae: 0.3606 - val_loss: 0.2627 - val_mae: 0.3452
Epoch 14/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2560 - mae: 0.3570 - val_loss: 0.2615 - val_mae: 0.3468
Epoch 15/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2512 - mae: 0.3541 - val_loss: 0.2627 - val_mae: 0.3481
Epoch 16/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2467 - mae: 0.3510 - val_loss: 0.2645 - val_mae: 0.3436
Epoch 17/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2427 - mae: 0.3480 - val_loss: 0.2667 - val_mae: 0.3454
Epoch 18/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2389 - mae: 0.3454 - val_loss: 0.2633 - val_mae: 0.3451
Epoch 19/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2347 - mae: 0.3425 - val_loss: 0.2666 - val_mae: 0.3499
Epoch 20/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2315 - mae: 0.3403 - val_loss: 0.2633 - val_mae: 0.3491
Epoch 21/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2275 - mae: 0.3375 - val_loss: 0.2651 - val_mae: 0.3476
Epoch 22/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2244 - mae: 0.3355 - val_loss: 0.2691 - val_mae: 0.3509
Epoch 23/30
101/101 [==============================] - 1s 8ms/step - loss: 0.2222 - mae: 0.3342 - val_loss: 0.2694 - val_mae: 0.3470
Epoch 24/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2189 - mae: 0.3317 - val_loss: 0.2729 - val_mae: 0.3503
Epoch 25/30
101/101 [==============================] - 1s 7ms/step - loss: 0.2164 - mae: 0.3297 - val_loss: 0.2691 - val_mae: 0.3506
Epoch 26/30
101/101 [==============================] - 1s 8ms/step - loss: 0.2143 - mae: 0.3281 - val_loss: 0.2749 - val_mae: 0.3529
Epoch 27/30
101/101 [==============================] - 1s 8ms/step - loss: 0.2118 - mae: 0.3265 - val_loss: 0.2700 - val_mae: 0.3520
Epoch 28/30
101/101 [==============================] - 1s 8ms/step - loss: 0.2102 - mae: 0.3253 - val_loss: 0.2765 - val_mae: 0.3561
Epoch 29/30
101/101 [==============================] - 1s 8ms/step - loss: 0.2081 - mae: 0.3236 - val_loss: 0.2765 - val_mae: 0.3507
Epoch 30/30
101/101 [==============================] - 1s 8ms/step - loss: 0.2065 - mae: 0.3223 - val_loss: 0.2747 - val_mae: 0.3549
In [ ]:
Seq_train = Model_4.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_4.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 0s 3ms/step
[[ 0.5281188   0.17726314  0.31123376  0.64639014  0.6096178   0.14278796
   1.137798    0.95694184  0.6258129   0.3064372   0.3945865   0.15011805
   1.00307     0.37511697  0.3902641   0.19563873  0.1493643   0.29939422
   0.3545963 ]
 [ 0.45972753  0.12882063  0.19287992  0.53828466  0.27187008  0.13407493
   1.0752013   0.9629417   0.53822553  0.1781026   0.2820459   0.10969541
   1.005238    0.28753936  0.32716072  0.06994903  0.13752268  0.31238496
   0.3026008 ]
 [ 0.49119544  0.16260673  0.20538849  0.58906513  0.41181505  0.17848012
   1.0687144   0.97553754  0.5167337   0.24306896  0.32712036  0.13647622
   0.9647866   0.35144085  0.35352927  0.11802077  0.22701757  0.3359024
   0.38339904]
 [ 0.46764943  0.1564916   0.224518    0.5788848   0.21210743  0.22636135
   1.00752     0.98225147  0.5547199   0.19837205  0.29455256  0.14673
   0.90909004  0.3496998   0.34488404  0.10088073  0.27124727  0.3166514
   0.40589568]
 [ 0.44556203  0.14013977  0.16728908  0.55791557  0.20011806  0.1732398
   1.0000994   0.96608573  0.5457446   0.1805768   0.2722493   0.13035408
   0.9767955   0.28640163  0.33922833  0.07614411  0.21438794  0.31610897
   0.3395826 ]
 [ 0.4575894   0.1517079   0.21454385  0.5147541   0.23787956  0.14271933
   0.9590472   0.917592    0.45729423  0.19489753  0.31729198  0.10588422
   0.91904825  0.27394646  0.31636626  0.07421832  0.14251205  0.27544403
   0.31586632]
 [ 0.45087892  0.150165    0.20025842  0.5237669   0.25240648  0.15553895
   0.97606516  0.9206599   0.4730307   0.17844722  0.3200666   0.11097193
   0.9052694   0.27048513  0.329808    0.0745851   0.2065613   0.28729182
   0.33852333]
 [ 0.45318002  0.15614307  0.18503346  0.46362424  0.29036707  0.14577076
   0.9529065   0.9350119   0.434848    0.17460278  0.30739012  0.09646196
   0.92334527  0.25646508  0.29253766  0.08882761  0.20990601  0.3130458
   0.29860228]
 [ 0.44313598  0.13241342  0.24366017  0.44808742  0.18424892  0.16074884
   0.94954133  0.94645536  0.4047849   0.17705984  0.28974867  0.08859377
   0.87841696  0.28977382  0.25569838  0.06889506  0.23015879  0.32730684
   0.2941443 ]
 [ 0.41350996  0.11432648  0.2506569   0.36263755  0.16042328  0.1774186
   0.96553874  1.0144818   0.70392895  0.13856578  0.2557692   0.0664186
   0.93213594  0.30860916  0.20852031  0.01334415  0.3134287   0.4840413
   0.28302854]
 [ 0.4351499   0.0936923   0.19101116  0.29917854  0.10492956  0.23064868
   1.0169356   1.0254146   0.6413943   0.11794402  0.23541826  0.07481342
   0.9357824   0.30597848  0.21361814  0.0429453   0.3679054   0.5045585
   0.2507137 ]
 [ 0.5248635   0.112272    0.31848335  0.500406    0.5107161   0.11114438
   1.0998919   0.98471224  0.6048611   0.34785733  0.2997045   0.2515642
   0.84699106  0.45369822  0.40280277  0.20248052  0.32872757  0.44558018
   0.3839506 ]
 [ 0.4353463   0.06387758  0.13847128  0.2502432   0.04044095  0.24595615
   1.0324349   1.0318303   0.5815259   0.13444725  0.22344036  0.08738085
   0.87618524  0.3255667   0.20280388  0.08981246  0.3696465   0.49412933
   0.23065665]
 [ 0.47819084  0.14734852  0.18452656  0.25764644  0.03085633  0.34990567
   0.9141729   1.071677    0.6082539   0.15438682  0.21316208  0.12793492
   0.850385    0.31265813  0.401272    0.33109125  0.35038695  0.46893284
   0.27076352]
 [ 0.7691198   0.32300156  0.5235309   0.38442534  0.334533    0.99848497
   1.0214983   1.8506093   0.79160994  0.408135    0.36653474  0.3874392
   0.8358373   0.484624    1.0214659   1.2699361   0.5431936   0.28170383
   0.61716926]
 [ 0.6989611   0.43152106  0.33092517  0.706283    0.9371689   0.73571116
   1.2423854   1.4516999   0.86591995  0.4827605   0.5394561   0.37613988
   0.80369264  0.5845306   1.0399909   0.9427631   0.6893135   0.4237477
   0.5226772 ]
 [ 0.6866781   0.42141563  0.4308746   0.65694296  1.117733    0.44997987
   1.1211116   1.2138525   0.7234157   0.5540831   0.66729     0.2720811
   0.7784597   0.55301344  0.86439705  0.74163496  0.6883429   0.596004
   0.49027932]
 [ 0.9484874   0.20615576  0.36348143  0.8092536   1.006935    1.2368597
   1.5816376   1.7469257   0.5849488   0.59802395  0.75067765  0.7912269
   0.93487126  0.42235333  1.0160646   0.86847633  1.3109099   0.8851297
   0.50254023]
 [ 0.78118336  0.7284132   0.5249875   0.4884982   1.3937702   0.9078893
   1.6333287   1.1101758   0.6318748   0.34277976  0.64020234  0.3757159
   0.8910698   0.42360532  0.2906415   0.7178197   1.0900828   0.6041969
   0.36973113]
 [ 0.72875005  0.88136744  0.79550844  0.76672584  2.3437982   1.2600746
   2.020122    1.4380914   0.9328641   0.48379663  0.93174464  0.5552193
   0.77442026  0.34899223  0.5050782   0.99827564  0.70720613  0.5599263
   0.6348839 ]
 [ 0.685637    1.0317414   1.1324384   0.59431744  2.3700335   0.7457633
   2.138922    1.0301636   1.0828608   0.3855443   0.90486956  0.2314572
   0.7471248   0.41122395  0.22546986  0.8268215   0.58192915  0.5154777
   0.3682349 ]
 [ 0.5900618   0.90051866  1.1458378   0.65587854  2.5376472   0.48263228
   1.9531099   0.85952044  1.1148504   0.61191326  0.90182865  0.19725451
   0.72214264  0.59693956  0.3511258   0.6935121   0.5124278   0.55334437
   0.3232898 ]
 [ 0.71361315  1.004976    0.8450621   0.75935566  2.2858515   0.75980556
   1.7403748   1.0421256   0.95150596  0.6846912   1.0938543   0.5426345
   0.6530753   0.47610602  0.5796609   0.70103824  0.6145173   0.6599554
   0.53220063]
 [ 0.86422455  0.70535594  0.6131033   0.5190229   2.0844622   0.6978249
   2.1001363   1.0333532   0.9633402   0.807174    0.7328855   0.2576
   0.9648321   0.449884    0.4773472   0.69497347  0.73883986  0.33651823
   0.45158207]
 [ 0.74248064  0.68734     0.8974616   0.7782341   2.1845498   0.60902977
   1.7521163   1.5062149   1.0129522   0.59710264  1.0236261   0.19544859
   0.9554229   0.652061    0.31232548  0.6912049   0.77425075  0.48067868
   0.41423345]
 [ 0.7020059   0.60584944  0.93156165  0.85254776  2.2430446   0.60189474
   1.749656    1.768313    1.0736417   0.55571294  1.0128404   0.22981448
   1.0556765   0.6814041   0.38824797  0.685649    0.7796525   0.5778171
   0.4163434 ]
 [ 0.66257066  0.5783709   0.75227785  0.7906521   1.8633173   0.5534136
   1.386906    1.2632215   0.9273364   0.5474651   0.8157857   0.24392942
   0.8931503   0.65839463  0.3838604   0.54460776  0.6891733   0.6024565
   0.48797715]
 [ 0.512394    0.5114891   0.47245196  0.7075987   1.3211871   0.4884923
   0.9707579   0.9570893   0.8072214   0.4967404   0.72760856  0.12717599
   0.66334164  0.74665403  0.2810819   0.47923577  0.5854013   0.5070881
   0.5996554 ]
 [ 0.5477084   0.3680969   0.26310557  0.69832325  1.091672    0.3378923
   1.0187974   0.939658    0.6867239   0.3804912   0.6512406   0.17416067
   0.60571253  0.5650922   0.5113457   0.5700381   0.7350427   0.3314883
   0.4213789 ]
 [ 0.6511893   0.30855775  0.29252174  0.69426346  1.1782175   0.5575636
   0.74432135  0.77828854  1.0200534   0.47860157  0.61079603  0.1029159
   0.6673428   0.65463614  0.48196214  0.59773946  1.0251261   0.3712806
   0.5900878 ]
 [ 0.739535    0.5240243   0.29536194  0.73614776  1.3816884   0.52829313
   1.0388944   1.0784092   1.1380662   0.4898245   0.698737    0.45329162
   0.8960974   0.71906245  0.5601591   0.5372574   0.9202503   0.5483987
   0.67624116]
 [ 0.8707535   0.62494105  0.269618    0.85179746  1.477401    0.5901084
   1.0597999   1.1319996   1.1587762   0.32021618  0.84605515  0.4330532
   0.9835351   0.61475503  0.60088193  0.5164145   1.0020187   0.64547276
   0.8656421 ]
 [ 0.9294033   0.3754468   0.1766656   0.7628195   1.6702116   1.2296618
   1.0252192   1.090902    1.2077912   0.3884225   0.68297845  0.24587134
   0.9014415   0.38758987  0.6438812   0.74226993  1.4571222   0.5763669
   0.7640717 ]
 [ 1.163678    0.5285203   0.02063741  0.8088357   1.9948301   1.7792739
   1.0853758   1.4652187   1.4935012   0.15164243  1.0246724   0.3851817
   1.0180085   0.16799168  0.6070436   0.8850131   1.8801867   0.7090819
   1.0897632 ]
 [ 1.2756605   0.52224016 -0.05970307  0.8827201   2.0889814   1.8898487
   1.0044618   1.5103236   1.3470662   0.01693705  1.1850166   0.516081
   0.94431865  0.03496452  0.47192693  0.9246297   2.0055113   0.61907184
   1.2806402 ]
 [ 1.202366    0.6850225   0.07278754  1.1261953   2.2061534   1.3522705
   0.9521672   1.9864588   1.1935577   0.02227858  1.5474102   1.2099919
   0.8724434   0.15888163  0.74558985  1.2482655   1.6239599   0.36022556
   1.2432787 ]
 [ 1.1111583   0.81510043  0.23751609  0.9845444   2.2802436   1.2362134
   1.498202    2.1646914   1.7627451   0.45262164  1.3740079   1.4550298
   1.0844271   0.80869377  0.98843455  1.4648781   1.4206086   1.2545811
   1.3983017 ]
 [ 1.3132228   1.072835    0.2374491   1.2446965   2.7726634   1.1461165
   1.6557994   2.3809621   2.4629087   0.5271914   1.4256837   2.153696
   1.4233768   0.78464127  0.70338285  1.515027    1.5825499   2.058207
   1.5345622 ]
 [ 1.308326    0.92798316  0.23714764  1.1730478   2.490575    1.2669654
   1.6279477   2.4634008   2.1701818   0.86170477  1.7185364   2.0340984
   1.5575032   0.68049955  0.38715237  1.5286345   1.6158961   2.0502496
   1.518083  ]
 [ 1.2926294   0.9904183   0.6765589   1.3480998   2.4006448   1.42991
   2.056491    2.066021    2.096921    1.4967159   1.7492378   2.6543875
   1.3972342   1.0544194   0.6764481   1.2093565   1.5466014   2.1555595
   1.1862564 ]
 [ 1.3503143   1.4753268   0.16063026  1.5495087   2.713612    1.3059497
   2.2387938   2.4134018   2.7918308   1.5328286   1.6637113   2.4055629
   1.2921078   1.5215598   1.1405693   1.313715    1.5691134   1.7029938
   1.812123  ]
 [ 1.1117644   1.2601728   0.37328723  1.375029    2.479762    0.91993874
   2.0433002   1.7082316   2.606619    1.3765885   1.5083184   1.2011768
   1.1133525   1.3667333   1.0985075   0.9007798   1.3953182   2.201853
   1.5984411 ]
 [ 1.1425253   0.8868531   0.42338574  1.2395792   2.5425768   0.8351611
   2.346042    1.9926126   2.6870492   1.349181    1.517694    1.2295169
   1.2340367   1.5861882   1.634732    0.75749004  1.5649191   2.372951
   1.2805533 ]
 [ 1.1066831   0.8263079   0.3507772   1.2060935   2.738394    0.72493654
   2.2982724   2.1588705   2.798377    1.2927324   1.4345176   1.0081643
   1.2144344   1.5778965   1.7840931   0.7882648   1.5400808   2.3246446
   1.0595793 ]
 [ 1.0006199   0.43154317  0.3569675   1.3277954   2.6857965   0.60878223
   2.1121235   1.6168654   2.480754    1.2242366   1.3438368   0.8276183
   1.1156971   1.5569664   1.6081284   0.62138826  1.5609488   1.9158875
   0.80654514]
 [ 0.80992603  0.4481742   0.64041895  0.93783367  2.43294     0.36072296
   1.6537111   1.5561512   1.9949375   0.7506261   0.8356587   0.35684916
   1.1314442   1.104056    1.1137674   0.29489812  0.908039    1.217838
   0.5128329 ]
 [ 0.8091042   0.3199461   0.697255    0.94321775  1.9683384   0.24038866
   1.285855    1.4382169   1.6726991   0.50950736  0.6040044   0.3355942
   1.1814454   1.3058144   0.66538715  0.1661126   0.84815943  0.94215286
   0.40269703]
 [ 0.6145439   0.09370713  0.6766202   0.8653017   1.4509848   0.20800385
   1.2689202   1.0790461   1.2907264   0.4969893   0.4060788   0.2267034
   1.0573969   1.3946875   0.35877746 -0.01382276  0.4501111   0.7052896
   0.20627679]] (3388, 48, 19)
24/24 [==============================] - 0s 3ms/step
[[0.61239696 0.58616734 0.7326478  0.82584465 0.88562995 0.8057345
  0.8264698  0.26540506 0.6741091  0.870638   0.7248254  0.16702585
  0.56345373 0.88064635 0.43247318 0.4342084  0.48859194 0.7019365
  0.9887816 ]
 [0.55884993 0.41367528 0.30048555 0.8230926  0.89427423 0.76281935
  0.448814   0.06654859 1.0522618  0.69136804 0.6557362  0.15212233
  0.70428073 0.66462433 0.33860677 0.41717786 0.9365485  0.6824964
  1.2747906 ]
 [0.525927   0.5991838  0.5059796  0.8065188  0.7781008  0.44378284
  0.4439274  0.17117515 1.024549   0.88383067 0.5828656  0.18014133
  0.48284853 0.84436154 0.41291714 0.361393   0.5740782  0.50458
  1.193187  ]
 [0.9374002  0.27798653 0.13759205 0.78880215 1.6302909  1.2847763
  0.97337157 0.98995245 0.83023703 0.5112684  0.77581483 0.22000295
  0.9390164  0.45419055 0.47970468 0.56278884 1.1720041  0.73244286
  0.73234856]
 [0.5910851  1.069775   0.7534159  0.97599125 0.87969404 0.81572455
  0.36537617 0.16515486 0.5582646  0.9094554  0.70469356 0.13925543
  0.3811257  0.74888337 0.52417576 0.5327258  0.5767584  0.4601269
  1.3951247 ]
 [0.7458015  0.4755329  0.79711777 0.7132205  0.85847896 0.83726376
  1.2535031  1.608217   0.74273324 0.7012732  0.47800267 0.77939606
  0.80791235 0.31013215 0.5136657  0.57309186 1.0650473  0.77010083
  0.5993379 ]
 [1.0399203  0.8620688  0.12501481 0.56931853 1.252321   1.2092283
  1.0423207  1.8810866  0.41647154 0.24508557 0.96249956 1.5723232
  0.8909423  0.2623223  0.66067755 1.2506144  1.355192   1.1521001
  1.1402578 ]
 [1.1177558  0.7945863  0.22996499 0.9933876  1.7666698  1.723137
  1.4491292  1.840367   1.4952172  0.92914397 1.2216136  2.3674726
  1.0993924  0.7628535  0.95742166 1.9012349  1.6984949  0.86747706
  2.0253992 ]
 [1.0313305  0.75857043 0.39170095 0.8614296  1.5719405  1.4496053
  1.3047799  2.3112733  1.6633326  0.68370855 1.0377414  1.1467253
  1.1624045  0.817495   1.4344299  1.6642423  1.6354777  1.0254948
  1.4028947 ]
 [0.9423759  1.0050509  0.3325631  0.66346455 1.4416118  1.0132926
  1.2141333  1.4878219  1.1528404  0.465769   0.7414447  1.0597858
  0.91778404 0.7079764  1.018021   1.0249798  1.0234958  0.33174232
  0.87668884]
 [0.9344882  0.7403743  0.3836639  1.049609   1.568054   0.79281986
  1.4398081  1.4977679  1.1216986  0.6604674  0.9934002  1.1502476
  0.66780794 0.79385436 1.1209787  0.9457816  1.1727644  0.2677566
  0.9391242 ]
 [1.1274085  1.1177076  0.20739438 1.4996662  2.2993767  1.0291405
  1.5961744  1.7911229  1.8396763  0.87181157 1.5340648  1.7577765
  1.1187594  1.0358037  1.0982088  1.177166   1.3760412  1.0701449
  1.413637  ]
 [1.0268924  0.43367857 0.24023336 0.7622509  1.9438484  0.97571135
  1.3558271  1.5361179  1.0180695  0.38470095 1.222262   0.3617725
  1.0277131  0.37859344 0.49874917 0.6419612  1.0484731  1.0051436
  0.67409277]
 [0.97803724 0.3951518  0.25821614 0.803108   2.0466216  0.7838744
  1.5835047  1.2248135  1.5284319  0.6214836  0.9165288  0.21094728
  1.1720793  0.5540218  0.3102821  0.48422605 0.83737576 0.9552343
  0.5056996 ]
 [0.84793305 0.45177084 0.17873499 0.6249152  1.5771662  0.54716134
  1.386737   1.1948714  1.075824   0.5800892  0.7064649  0.3420024
  1.0576258  0.72929597 0.5033238  0.51844215 0.7924721  0.8923249
  0.7245896 ]
 [0.7207264  0.8096508  0.4225734  0.76448584 0.8799488  0.81719154
  0.6655764  0.75233185 0.74695224 0.4192278  0.8245581  0.3114776
  0.570521   0.5380238  0.6612377  0.766304   0.7950754  0.76184833
  0.85431314]
 [0.61005056 0.41086516 0.36033928 0.6873851  0.83853585 0.4246732
  0.71686745 0.43998164 0.8118361  0.75396127 0.6660174  0.16141427
  0.6232734  0.73269415 0.48154128 0.53067565 0.63132125 0.6122154
  0.70330566]
 [0.23403683 0.16595696 0.3364777  0.35741705 0.25678423 0.06991114
  0.42096815 1.0416667  0.3484784  0.33757594 0.49467224 0.21602431
  0.29500967 0.33253443 0.22881001 0.18423977 0.27729863 0.41627777
  0.18124563]
 [0.3339232  0.23418358 0.41709018 0.34550184 0.45005393 0.08895332
  0.42721593 0.8802978  0.39088452 0.36327633 0.430985   0.14084803
  0.36559582 0.36155522 0.2158062  0.2506227  0.21632265 0.25066823
  0.26818004]
 [0.33844835 0.12830827 0.14228567 0.25087643 0.5205663  0.01627308
  0.5545451  0.9236029  0.33318734 0.2295466  0.3015205  0.14374723
  0.39792877 0.2501398  0.2745737  0.14693809 0.1359953  0.22186047
  0.22758503]
 [0.33833593 0.11121848 0.18475188 0.20497127 0.11059734 0.08438563
  0.43723726 0.7354917  0.18097371 0.20279981 0.31302607 0.08385784
  0.32447445 0.23691706 0.25776917 0.2055816  0.19463941 0.18492417
  0.20108767]
 [0.30404797 0.12239906 0.12420304 0.1909045  0.17642596 0.08140725
  0.4739955  0.7599676  0.12915537 0.1414885  0.30690712 0.10540266
  0.333023   0.16448694 0.28424627 0.1785376  0.18634872 0.1815152
  0.2167483 ]
 [0.2798584  0.05825324 0.12025749 0.17994958 0.02799909 0.01282009
  0.38962638 0.6896673  0.09618497 0.13236149 0.2880779  0.07962672
  0.26543468 0.20770985 0.28165203 0.15573789 0.10924722 0.12456553
  0.1805267 ]
 [0.303379   0.11304914 0.10307935 0.16794366 0.13555852 0.05764329
  0.4581717  0.75721383 0.08876556 0.10704562 0.29457253 0.09629021
  0.34247985 0.14552972 0.27388474 0.15285465 0.1559962  0.15644144
  0.20876096]
 [0.3458623  0.10934366 0.19264574 0.1528047  0.04454289 0.09962504
  0.3956722  0.7221102  0.19936314 0.1821153  0.28041768 0.0689871
  0.3351409  0.21148002 0.24725269 0.19518304 0.16792896 0.15789615
  0.19683924]
 [0.20936829 0.13524202 0.16870129 0.25867528 0.12534755 0.00585172
  0.35573235 0.82395315 0.12939711 0.19975214 0.3187401  0.11247181
  0.18178803 0.20229682 0.27297246 0.18826345 0.1259537  0.19818953
  0.19783576]
 [0.30111507 0.08698814 0.14602724 0.17836986 0.03812055 0.02922046
  0.4032048  0.709732   0.13754116 0.1532993  0.28066227 0.07287349
  0.3034426  0.19728532 0.24487947 0.14222294 0.10699601 0.13249536
  0.18311003]
 [0.288475   0.09678416 0.14324227 0.196077   0.05344081 0.07305183
  0.45064682 0.714842   0.11547463 0.15561181 0.30289954 0.09158903
  0.26400056 0.19816898 0.26486346 0.17186937 0.18689628 0.1790152
  0.18930583]
 [0.25303745 0.08328393 0.23714994 0.18546668 0.07604358 0.03103592
  0.36564204 0.8178153  0.03379074 0.1571269  0.259922   0.0700592
  0.24262562 0.19527131 0.24691592 0.20379888 0.12243116 0.16406628
  0.1595747 ]
 [0.30752414 0.06263918 0.19793838 0.22662976 0.07297591 0.03407827
  0.4401719  0.74746376 0.0798358  0.19216941 0.24315701 0.08420305
  0.3508631  0.22537458 0.26620755 0.22013366 0.19780362 0.17121732
  0.18158635]
 [0.37618923 0.11889473 0.22164959 0.26059198 0.22894599 0.07365978
  0.5028893  0.8096367  0.17963374 0.23295425 0.30176476 0.12658536
  0.37288192 0.26464236 0.3229714  0.30841184 0.32389835 0.21721743
  0.22285175]
 [0.34759423 0.16025075 0.20989381 0.32193267 0.5481848  0.13359919
  0.6422608  0.9638243  0.40649772 0.15262242 0.35938305 0.12240791
  0.49396348 0.2320693  0.33684015 0.24759282 0.33463863 0.34307694
  0.25654384]
 [0.3733366  0.27567792 0.17916928 0.29748562 1.7681127  0.47420856
  0.8490732  1.2027919  0.5010611  0.34259936 0.42953324 0.10511427
  0.5007949  0.2516543  0.35222366 0.45503873 0.532249   0.45807508
  0.37718344]
 [0.68798745 0.69332826 0.29107228 0.5835146  2.4851656  0.71135116
  0.78396446 1.6826344  0.5193635  0.28807262 0.4901417  0.55724317
  0.32063025 0.60011363 0.62482655 1.0842488  0.5787504  0.4804851
  0.5709709 ]
 [0.71240664 0.73411846 0.26183116 0.7243383  2.0931664  0.5756701
  1.0803828  1.4651297  1.3684148  0.7936959  0.87504387 0.49939406
  0.6695094  1.0002356  0.8259016  1.3528061  0.7775632  1.0142685
  1.2789854 ]
 [0.8028436  0.6365167  0.7454466  0.6561247  1.4112034  0.48276556
  0.78732777 1.247141   0.5382274  0.7393651  0.90027064 0.7272925
  0.49370718 0.5488348  0.8162403  0.9527651  0.46385443 0.26638258
  0.4409911 ]
 [0.7171667  0.789111   0.7616689  0.72798526 1.4306375  0.5692378
  1.2573361  0.9016822  0.85898703 0.6169939  0.97037935 0.4100281
  0.5518301  0.83197474 0.62248313 0.77168435 0.82977736 0.46082708
  0.5736368 ]
 [0.49114776 0.83904815 0.68025446 0.81446826 1.167566   0.4695425
  0.569183   0.664816   0.59860957 0.6457331  0.9485626  0.14769931
  0.37529454 0.6137176  0.45271176 0.526132   0.4739288  0.22981003
  0.6530595 ]
 [0.5147682  0.79136205 0.90965044 0.7862969  0.83844745 0.6449022
  0.61270845 0.21326959 0.5675641  0.8655668  0.79307646 0.09681737
  0.3138379  0.84377575 0.35079205 0.3965806  0.35164762 0.44053164
  0.743634  ]
 [0.6920637  0.9565468  0.80646676 0.7053598  0.92836833 0.7188437
  0.69906306 0.85427403 0.47253105 0.834425   0.9084599  0.12885752
  0.48607513 0.6278037  0.46738786 0.6147021  0.42797112 0.88393915
  0.57193756]
 [0.7542299  0.73231125 0.43422565 0.7958166  0.15291294 1.0455391
  0.32511312 0.43760157 0.57651585 1.0716298  0.62389815 0.3194458
  0.61772364 0.91332376 0.5586087  0.28313696 0.5706319  2.0454564
  1.1582688 ]
 [0.53273076 0.9416251  0.72820896 0.8716376  0.15208578 0.6105906
  0.2421778  0.18794052 0.7055203  0.82869166 0.48790312 0.14127168
  0.3132181  0.76763666 0.3529148  0.25229734 0.48518047 0.65382826
  1.2839154 ]
 [0.5810301  0.7054416  0.6479677  0.8365227  0.0847149  0.5590246
  0.4001861  0.21966954 0.713084   0.9507784  0.4278767  0.15112269
  0.35742193 0.86345005 0.41329566 0.1913951  0.4832736  0.8717991
  1.2308304 ]
 [0.56342196 0.37560475 0.7498991  0.6971563  0.563432   0.40381798
  0.6590008  0.4657055  0.38101846 0.85937047 0.45641792 0.14418274
  0.18987408 0.88374805 0.37723088 0.45060873 0.4351032  0.34440562
  0.5870497 ]
 [0.5317534  0.36343294 0.7113033  0.70908225 0.6181358  0.54269695
  0.7990614  0.57116663 0.54188335 0.8751214  0.4653337  0.12435351
  0.3417474  0.93301105 0.33902675 0.39445588 0.44372478 0.5000949
  0.62670255]
 [0.53423524 0.5005225  0.71214426 0.7385154  0.53824115 0.5635836
  0.6112987  0.38436902 0.66837114 0.9871512  0.494758   0.16552427
  0.30451515 0.9313673  0.2993435  0.35142505 0.38605174 0.61564195
  0.80721736]
 [0.52253723 0.56809884 0.5852746  0.8159354  0.59985715 0.52410865
  0.6013127  0.21360587 0.7568456  0.94448125 0.54613054 0.21668684
  0.347264   0.9101027  0.44500607 0.3990383  0.4691651  0.64738137
  1.0884683 ]
 [0.50118655 0.49623936 0.46159428 0.7097254  0.74582523 0.60321736
  0.46548697 0.18976559 0.70751    0.8147461  0.60107    0.1715637
  0.5493077  0.68784904 0.31948367 0.38529104 0.5295094  0.66221654
  1.0451205 ]] (740, 48, 19)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.67 RMSE
Test Score: 0.47 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
from keras.layers.core import Dense, Activation, Dropout

model = Sequential()

model.add(LSTM(50, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))

model.add(Dense(trainX.shape[2]))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_17 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_35"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_17 (LSTM)               (None, 48, 50)            14000     
_________________________________________________________________
dense_84 (Dense)             (None, 48, 19)            969       
=================================================================
Total params: 14,969
Trainable params: 14,969
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C4 = model.fit(trainX, trainY, epochs=30, validation_split = 0.05, batch_size=32, callbacks=[lr_decay])
Epoch 1/30
101/101 [==============================] - 11s 113ms/step - loss: 0.4942 - mae: 0.5007 - val_loss: 0.2784 - val_mae: 0.3671
Epoch 2/30
101/101 [==============================] - 11s 109ms/step - loss: 0.3366 - mae: 0.4148 - val_loss: 0.2649 - val_mae: 0.3611
Epoch 3/30
101/101 [==============================] - 11s 113ms/step - loss: 0.3147 - mae: 0.3958 - val_loss: 0.2632 - val_mae: 0.3594
Epoch 4/30
101/101 [==============================] - 11s 110ms/step - loss: 0.3026 - mae: 0.3861 - val_loss: 0.2669 - val_mae: 0.3613
Epoch 5/30
101/101 [==============================] - 11s 112ms/step - loss: 0.2940 - mae: 0.3797 - val_loss: 0.2666 - val_mae: 0.3584
Epoch 6/30
101/101 [==============================] - 11s 110ms/step - loss: 0.2875 - mae: 0.3747 - val_loss: 0.2610 - val_mae: 0.3575
Epoch 7/30
101/101 [==============================] - 11s 110ms/step - loss: 0.2821 - mae: 0.3710 - val_loss: 0.2569 - val_mae: 0.3553
Epoch 8/30
101/101 [==============================] - 11s 111ms/step - loss: 0.2782 - mae: 0.3681 - val_loss: 0.2558 - val_mae: 0.3537
Epoch 9/30
101/101 [==============================] - 11s 111ms/step - loss: 0.2752 - mae: 0.3662 - val_loss: 0.2581 - val_mae: 0.3522
Epoch 10/30
101/101 [==============================] - 11s 112ms/step - loss: 0.2730 - mae: 0.3645 - val_loss: 0.2551 - val_mae: 0.3515
Epoch 11/30
101/101 [==============================] - 11s 113ms/step - loss: 0.2712 - mae: 0.3632 - val_loss: 0.2529 - val_mae: 0.3504
Epoch 12/30
101/101 [==============================] - 11s 112ms/step - loss: 0.2699 - mae: 0.3624 - val_loss: 0.2547 - val_mae: 0.3506
Epoch 13/30
101/101 [==============================] - 11s 111ms/step - loss: 0.2688 - mae: 0.3615 - val_loss: 0.2524 - val_mae: 0.3498
Epoch 14/30
101/101 [==============================] - 11s 111ms/step - loss: 0.2679 - mae: 0.3611 - val_loss: 0.2536 - val_mae: 0.3495
Epoch 15/30
101/101 [==============================] - 11s 109ms/step - loss: 0.2673 - mae: 0.3605 - val_loss: 0.2524 - val_mae: 0.3484
Epoch 16/30
101/101 [==============================] - 12s 115ms/step - loss: 0.2667 - mae: 0.3601 - val_loss: 0.2529 - val_mae: 0.3490
Epoch 17/30
101/101 [==============================] - 11s 112ms/step - loss: 0.2663 - mae: 0.3598 - val_loss: 0.2523 - val_mae: 0.3485
Epoch 18/30
101/101 [==============================] - 11s 108ms/step - loss: 0.2660 - mae: 0.3597 - val_loss: 0.2528 - val_mae: 0.3479
Epoch 19/30
101/101 [==============================] - 11s 111ms/step - loss: 0.2657 - mae: 0.3594 - val_loss: 0.2525 - val_mae: 0.3480
Epoch 20/30
101/101 [==============================] - 11s 112ms/step - loss: 0.2655 - mae: 0.3593 - val_loss: 0.2526 - val_mae: 0.3478
Epoch 21/30
101/101 [==============================] - 11s 112ms/step - loss: 0.2653 - mae: 0.3591 - val_loss: 0.2527 - val_mae: 0.3479
Epoch 22/30
101/101 [==============================] - 11s 110ms/step - loss: 0.2652 - mae: 0.3590 - val_loss: 0.2524 - val_mae: 0.3477
Epoch 23/30
101/101 [==============================] - 11s 112ms/step - loss: 0.2650 - mae: 0.3589 - val_loss: 0.2521 - val_mae: 0.3477
Epoch 24/30
101/101 [==============================] - 11s 110ms/step - loss: 0.2649 - mae: 0.3589 - val_loss: 0.2522 - val_mae: 0.3478
Epoch 25/30
101/101 [==============================] - 11s 113ms/step - loss: 0.2649 - mae: 0.3588 - val_loss: 0.2524 - val_mae: 0.3477
Epoch 26/30
101/101 [==============================] - 11s 110ms/step - loss: 0.2648 - mae: 0.3587 - val_loss: 0.2522 - val_mae: 0.3477
Epoch 27/30
101/101 [==============================] - 11s 111ms/step - loss: 0.2648 - mae: 0.3588 - val_loss: 0.2522 - val_mae: 0.3476
Epoch 28/30
101/101 [==============================] - 11s 109ms/step - loss: 0.2647 - mae: 0.3587 - val_loss: 0.2524 - val_mae: 0.3476
Epoch 29/30
101/101 [==============================] - 11s 111ms/step - loss: 0.2647 - mae: 0.3587 - val_loss: 0.2523 - val_mae: 0.3476
Epoch 30/30
101/101 [==============================] - 11s 110ms/step - loss: 0.2647 - mae: 0.3587 - val_loss: 0.2522 - val_mae: 0.3476
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C4.history['loss'], label='train')
plt.plot(history_C4.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C4 = model.predict(trainX, verbose = 1)
print(testingtrain_C4[0], testingtrain_C4.shape)

testingtest_C4 = model.predict(testX, verbose = 1)
print(testingtest_C4[0], testingtest_C4.shape)
106/106 [==============================] - 1s 11ms/step
[[ 4.29803461e-01  2.26358682e-01  2.36605674e-01  2.99072713e-01
   4.16010708e-01  1.62374377e-01  5.70669234e-01  6.22590899e-01
   4.79710340e-01  2.78170466e-01  3.22315156e-01  1.38628528e-01
   4.95511562e-01  3.32537889e-01  2.86609769e-01  2.54352987e-01
   2.97351032e-01  3.36402297e-01  3.39514554e-01]
 [ 4.98685181e-01  2.59500086e-01  2.27076545e-01  3.37320387e-01
   4.07464266e-01  1.63786694e-01  7.47998655e-01  8.32124531e-01
   4.84974205e-01  2.28480369e-01  3.17202866e-01  1.62191719e-01
   6.22031391e-01  3.16947371e-01  3.30520779e-01  3.17278236e-01
   2.98993707e-01  4.08453077e-01  3.31306070e-01]
 [ 5.91017783e-01  2.88871944e-01  2.22318858e-01  3.82580042e-01
   3.99395645e-01  1.65878370e-01  9.32163179e-01  1.05397499e+00
   4.95421261e-01  1.96287781e-01  3.26567799e-01  1.87122822e-01
   7.56446719e-01  3.01021427e-01  3.87932360e-01  4.06921029e-01
   3.64469230e-01  4.96682435e-01  3.42321545e-01]
 [ 5.83682477e-01  2.88130969e-01  2.31743440e-01  3.70655656e-01
   3.07661176e-01  1.59827262e-01  1.01024354e+00  1.14509571e+00
   4.70198780e-01  1.27191097e-01  2.62606561e-01  1.59503132e-01
   8.06338012e-01  2.60034174e-01  3.95595163e-01  3.48264486e-01
   3.38272572e-01  4.75204855e-01  2.89753377e-01]
 [ 5.67357361e-01  2.43300349e-01  2.15508923e-01  3.52877915e-01
   2.05903918e-01  1.55146316e-01  1.03379619e+00  1.17097104e+00
   4.23487157e-01  8.04058909e-02  2.07706660e-01  1.04853168e-01
   8.44019532e-01  2.13821396e-01  3.83881152e-01  2.87388057e-01
   3.07907999e-01  4.43879515e-01  2.49909431e-01]
 [ 5.36311448e-01  1.97068542e-01  2.33626381e-01  3.20146620e-01
   1.60557091e-01  1.89994052e-01  1.03609705e+00  1.13767838e+00
   4.05121565e-01  5.46989851e-02  1.66371271e-01  5.52308708e-02
   8.57930243e-01  1.88813522e-01  3.63610566e-01  2.04916835e-01
   2.82667756e-01  4.06364858e-01  2.27452636e-01]
 [ 5.10176659e-01  1.63411081e-01  2.37012103e-01  2.90207297e-01
   1.27143443e-01  2.00039461e-01  1.03993762e+00  1.11861980e+00
   3.96616906e-01  4.44737561e-02  1.37128174e-01  1.31220929e-02
   8.74932289e-01  1.76062033e-01  3.46472353e-01  1.59698978e-01
   2.86701351e-01  3.83604586e-01  2.20460862e-01]
 [ 4.91514504e-01  1.45258367e-01  2.42085010e-01  2.58216619e-01
   1.23142995e-01  2.14158013e-01  1.02463818e+00  1.09429598e+00
   3.96149039e-01  5.13002463e-02  1.26840532e-01  4.75489395e-03
   8.77641737e-01  1.76183507e-01  3.31931412e-01  1.37938783e-01
   2.93573529e-01  3.67058903e-01  2.34052688e-01]
 [ 4.84715611e-01  1.38120800e-01  2.62552381e-01  2.25431189e-01
   1.42031401e-01  2.30060384e-01  1.02749407e+00  1.09066296e+00
   4.21366483e-01  6.76928610e-02  1.22794978e-01  1.34118423e-02
   8.77364397e-01  1.96580991e-01  3.30747008e-01  1.19484276e-01
   3.51122379e-01  3.82552534e-01  2.44819760e-01]
 [ 4.76338357e-01  1.37718439e-01  2.48103842e-01  1.97992131e-01
   1.79736704e-01  2.21119717e-01  1.04271519e+00  1.11459589e+00
   4.43771183e-01  7.62615725e-02  1.08285718e-01 -2.58909073e-03
   8.63936424e-01  2.20339999e-01  3.17726225e-01  8.20639879e-02
   4.07000095e-01  4.30835456e-01  2.26849586e-01]
 [ 4.90622252e-01  1.43196881e-01  2.51922011e-01  1.86880082e-01
   2.06689358e-01  2.32732952e-01  1.07251549e+00  1.17778325e+00
   4.83519524e-01  1.06688410e-01  1.00054562e-01  4.46752459e-02
   8.71760905e-01  2.60350466e-01  3.38233024e-01  8.96994099e-02
   4.97091562e-01  4.92914289e-01  2.31413722e-01]
 [ 5.10800719e-01  2.32092172e-01  2.26400807e-01  2.10635543e-01
   2.84054101e-01  2.97638625e-01  1.11023736e+00  1.25698912e+00
   5.26712179e-01  1.02686971e-01  1.07921079e-01  1.70145884e-01
   8.80538821e-01  2.71541357e-01  3.08003068e-01  1.64063975e-01
   5.73942482e-01  6.16414309e-01  2.44979829e-01]
 [ 4.69292223e-01  2.19850570e-01  2.57171899e-01  1.74949855e-01
   2.70980358e-01  3.10756624e-01  1.10801053e+00  1.20516002e+00
   5.37284017e-01  1.58509344e-01  8.71853456e-02  1.64781511e-01
   8.34591508e-01  3.03310841e-01  3.38200748e-01  1.42821386e-01
   6.56840980e-01  5.90316355e-01  2.16067612e-01]
 [ 4.97272015e-01  2.25960076e-01  2.65527368e-01  1.79894328e-01
   2.97568381e-01  4.21358407e-01  1.18951571e+00  1.27920938e+00
   5.39448559e-01  2.58461356e-01  1.26781821e-01  2.98149019e-01
   8.50275218e-01  3.58853877e-01  4.58865494e-01  2.67281979e-01
   7.90755391e-01  6.31470323e-01  2.34123647e-01]
 [ 6.26392186e-01  3.02543163e-01  2.92639971e-01  2.49364138e-01
   5.07196009e-01  6.97569430e-01  1.40679252e+00  1.48067212e+00
   4.59820569e-01  4.53492790e-01  3.34414393e-01  5.80386579e-01
   9.02573287e-01  3.80997896e-01  7.16621935e-01  6.76223934e-01
   1.08900571e+00  7.27791309e-01  2.18149066e-01]
 [ 7.51859784e-01  4.12141800e-01  4.30545807e-01  3.39270741e-01
   7.50225902e-01  9.66670692e-01  1.56070459e+00  1.63985908e+00
   3.65648866e-01  6.97800517e-01  6.96075916e-01  8.16513658e-01
   9.33101773e-01  3.53039473e-01  1.05637956e+00  1.03079987e+00
   1.15269661e+00  7.37107038e-01  2.55530864e-01]
 [ 7.28836358e-01  5.42173207e-01  6.97602987e-01  3.44348848e-01
   1.10907149e+00  1.12127507e+00  1.58465505e+00  1.53948331e+00
   3.78497303e-01  8.80916417e-01  9.68277454e-01  8.77937078e-01
   9.06677246e-01  3.13393801e-01  1.03814912e+00  1.08177257e+00
   1.05482399e+00  6.48811400e-01  3.31350178e-01]
 [ 6.91238642e-01  9.30281103e-01  7.41866708e-01  5.33719540e-01
   1.36172235e+00  1.21971774e+00  1.67954075e+00  1.69484210e+00
   6.07057571e-01  8.41381788e-01  1.30662763e+00  1.12512171e+00
   9.63286042e-01  3.81587863e-01  1.01072228e+00  1.22080517e+00
   1.29408431e+00  8.54789019e-01  5.61617732e-01]
 [ 5.99852085e-01  1.22499979e+00  1.14260221e+00  3.52886021e-01
   1.80198193e+00  1.16453719e+00  1.54002476e+00  1.45430243e+00
   6.31087899e-01  7.45424092e-01  1.22473419e+00  7.59635925e-01
   6.95179582e-01  2.59172797e-01  5.32257795e-01  9.17544484e-01
   9.03333902e-01  7.45117545e-01  7.13308275e-01]
 [ 6.15706086e-01  1.28493369e+00  1.31634903e+00  4.71406817e-01
   2.24340034e+00  1.17719162e+00  1.70535064e+00  1.45135748e+00
   8.54630888e-01  6.88549817e-01  1.22001994e+00  5.24626255e-01
   6.73529744e-01  3.15091491e-01  3.63441169e-01  6.05172276e-01
   7.21462488e-01  8.28922153e-01  8.12682033e-01]
 [ 6.51587248e-01  1.19734907e+00  1.34741223e+00  4.74022418e-01
   2.20898819e+00  1.02374816e+00  1.94599259e+00  1.46202874e+00
   1.07258928e+00  5.25042534e-01  9.57512856e-01  3.48336995e-01
   7.59184122e-01  4.01094109e-01  2.63589174e-01  4.02442008e-01
   6.18013978e-01  7.54083216e-01  7.37708628e-01]
 [ 6.41923010e-01  1.14754355e+00  1.18634295e+00  6.03547633e-01
   2.16704893e+00  9.89724994e-01  2.06138706e+00  1.49497998e+00
   1.20314348e+00  4.90902573e-01  8.18256617e-01  3.73469949e-01
   8.94022524e-01  5.56200743e-01  4.73111004e-01  3.21694881e-01
   5.95159829e-01  6.98105693e-01  6.85710371e-01]
 [ 5.81170976e-01  9.34423923e-01  1.09953570e+00  6.00604832e-01
   2.12172556e+00  8.57314467e-01  2.00071311e+00  1.28269124e+00
   1.13125408e+00  5.58395505e-01  7.30697334e-01  3.25707346e-01
   8.22455585e-01  6.97465301e-01  3.94138157e-01  3.01057190e-01
   6.15534902e-01  5.58624268e-01  5.68227232e-01]
 [ 5.77012122e-01  7.81353354e-01  8.99984717e-01  7.20133603e-01
   2.02509189e+00  8.50547135e-01  1.86616862e+00  1.17225850e+00
   1.09933186e+00  6.45234644e-01  7.09678352e-01  3.41513306e-01
   7.86121786e-01  8.52367640e-01  5.10743380e-01  3.65036309e-01
   7.03441262e-01  5.62832117e-01  6.23633265e-01]
 [ 6.05256081e-01  8.39750826e-01  7.56776750e-01  7.66087234e-01
   1.97337520e+00  8.15357864e-01  1.66249025e+00  1.14064991e+00
   1.06540525e+00  6.09474659e-01  6.67833567e-01  3.62450361e-01
   7.42694497e-01  8.36536646e-01  3.63405585e-01  4.20944005e-01
   6.09644473e-01  5.91189265e-01  6.96692586e-01]
 [ 6.89340413e-01  9.03322399e-01  6.53982103e-01  9.30828631e-01
   1.99305975e+00  8.55993688e-01  1.56562805e+00  1.24737597e+00
   1.12929189e+00  6.71853364e-01  6.99807584e-01  4.08432961e-01
   8.37355494e-01  9.22017813e-01  3.60371083e-01  4.30565417e-01
   6.23480856e-01  7.36064672e-01  8.13958108e-01]
 [ 6.71729505e-01  9.77673650e-01  4.70224380e-01  9.84720170e-01
   1.93359709e+00  7.81943202e-01  1.42443335e+00  1.21348727e+00
   1.05273199e+00  6.66300893e-01  6.86609864e-01  3.72997910e-01
   8.72796714e-01  9.58649039e-01  2.50155002e-01  4.32506442e-01
   5.49882770e-01  7.10250735e-01  8.41565371e-01]
 [ 5.67272305e-01  9.05015171e-01  2.39370644e-01  9.46548939e-01
   1.89643443e+00  6.80262148e-01  1.22015762e+00  9.94746864e-01
   9.50081587e-01  5.71978450e-01  6.85342193e-01  2.30690166e-01
   8.50597620e-01  8.45557570e-01  1.27426162e-01  4.31820154e-01
   6.47876918e-01  5.89053154e-01  8.19781661e-01]
 [ 4.83180821e-01  7.78626025e-01  1.70117706e-01  9.09273744e-01
   1.74467051e+00  6.81619108e-01  1.10086787e+00  8.89071286e-01
   8.73744726e-01  5.43660283e-01  6.13966584e-01  2.06876397e-01
   8.22828174e-01  7.70053267e-01  1.35140017e-01  4.57447797e-01
   6.72520339e-01  4.33980882e-01  7.98353553e-01]
 [ 5.05289316e-01  6.12270176e-01  2.66336828e-01  9.17723656e-01
   1.59287024e+00  7.52803445e-01  9.98287559e-01  7.84661949e-01
   9.37023342e-01  6.21414959e-01  6.39510572e-01  2.64620543e-01
   8.15139055e-01  7.37856269e-01  1.96087316e-01  4.75742400e-01
   6.57368720e-01  3.69838297e-01  8.81071568e-01]
 [ 5.73778510e-01  5.29024124e-01  2.97911316e-01  1.02446175e+00
   1.69200361e+00  7.83154547e-01  9.86800075e-01  7.79331446e-01
   1.11834061e+00  6.72064245e-01  7.28394866e-01  2.77596146e-01
   8.51902843e-01  8.09602857e-01  3.02760273e-01  5.12265027e-01
   7.78556824e-01  4.54278231e-01  1.02878487e+00]
 [ 6.85155034e-01  5.61079443e-01  2.71419644e-01  1.10757756e+00
   1.85056829e+00  7.97059476e-01  9.29665804e-01  8.29066873e-01
   1.35400093e+00  6.75402880e-01  8.43917429e-01  3.48597080e-01
   9.18149948e-01  8.22968483e-01  3.84703636e-01  5.92795670e-01
   1.00828052e+00  6.12135291e-01  1.15141368e+00]
 [ 7.87683725e-01  7.22531199e-01  1.02592014e-01  1.10684025e+00
   2.10393476e+00  9.04479861e-01  8.97897840e-01  1.09826708e+00
   1.45294046e+00  5.78647137e-01  9.02244687e-01  4.30430710e-01
   9.49747205e-01  8.11631799e-01  4.09569621e-01  8.27912390e-01
   1.17663562e+00  6.94894612e-01  1.32187366e+00]
 [ 8.56247127e-01  8.09474349e-01  7.56875798e-02  1.07955396e+00
   2.29192686e+00  1.06454980e+00  9.10876632e-01  1.24411678e+00
   1.62206483e+00  6.22442305e-01  1.00759161e+00  6.14451945e-01
   1.02877784e+00  8.32538843e-01  5.22058308e-01  9.81608748e-01
   1.27733231e+00  7.00337470e-01  1.43641484e+00]
 [ 9.72967267e-01  1.06361306e+00  7.87436590e-02  1.05442297e+00
   2.55525970e+00  1.28783083e+00  1.06127560e+00  1.62490046e+00
   1.93159449e+00  7.51414239e-01  1.10346615e+00  1.08048022e+00
   1.10254753e+00  1.00748432e+00  7.89634466e-01  1.32563031e+00
   1.47758234e+00  8.03975403e-01  1.59552610e+00]
 [ 1.10797668e+00  1.52839172e+00  5.69177642e-02  1.15733838e+00
   2.74591255e+00  1.42547214e+00  1.19847524e+00  2.25762415e+00
   2.24646425e+00  7.82165885e-01  1.11645818e+00  1.66707218e+00
   1.23357153e+00  1.20356095e+00  1.13178992e+00  1.80321503e+00
   1.69508123e+00  1.07471967e+00  1.79241252e+00]
 [ 9.94723797e-01  1.42647481e+00  8.41163695e-02  1.18524492e+00
   2.62532592e+00  1.28715730e+00  1.32704294e+00  2.15290546e+00
   2.24386573e+00  8.88730884e-01  1.23127139e+00  1.58116186e+00
   1.30036175e+00  1.29159355e+00  1.37505007e+00  1.56221378e+00
   1.48595178e+00  1.05543983e+00  1.69795680e+00]
 [ 1.02846920e+00  1.64027226e+00  2.31299952e-01  1.49727738e+00
   2.82464838e+00  1.31910622e+00  1.55761254e+00  2.34366679e+00
   2.73764873e+00  1.28460264e+00  1.36537290e+00  1.91264832e+00
   1.33015561e+00  1.61969173e+00  1.75548947e+00  1.64839804e+00
   1.62821746e+00  1.57955480e+00  1.74580467e+00]
 [ 9.95021999e-01  1.41875482e+00  2.36959323e-01  1.42396331e+00
   2.99754620e+00  1.20730472e+00  1.67581320e+00  2.22161651e+00
   2.55551863e+00  1.25238538e+00  1.39523208e+00  1.87917960e+00
   1.23352981e+00  1.52933061e+00  1.55747354e+00  1.56099510e+00
   1.46945763e+00  1.69628298e+00  1.57486641e+00]
 [ 1.01354849e+00  1.04582512e+00  1.71098769e-01  1.43140674e+00
   2.94737220e+00  1.14772964e+00  1.71188247e+00  1.92166030e+00
   2.52848053e+00  1.20156121e+00  1.53354979e+00  1.63598323e+00
   1.25988734e+00  1.48414981e+00  1.49418700e+00  1.17745697e+00
   1.29604590e+00  1.73552680e+00  1.53154910e+00]
 [ 1.07474542e+00  1.15739787e+00  1.73438281e-01  1.44915915e+00
   2.98720026e+00  1.25326180e+00  1.81117511e+00  1.84018803e+00
   2.77842975e+00  1.18691015e+00  1.73607242e+00  1.72852778e+00
   1.09350920e+00  1.41938066e+00  1.44408691e+00  1.03434086e+00
   1.41581035e+00  2.11777186e+00  1.60404050e+00]
 [ 1.13888872e+00  1.09888780e+00  3.77342820e-01  1.43665373e+00
   3.10762858e+00  1.18240547e+00  1.95153940e+00  1.74811852e+00
   2.97540736e+00  1.19103265e+00  1.84005439e+00  1.63860083e+00
   1.09773743e+00  1.42627180e+00  1.37646735e+00  9.00481880e-01
   1.50579154e+00  2.32069492e+00  1.63634372e+00]
 [ 1.00026715e+00  8.65490019e-01  5.37338734e-01  1.42543423e+00
   3.03101420e+00  9.61458325e-01  2.11156583e+00  1.66408384e+00
   2.84598422e+00  1.12796783e+00  1.71744251e+00  1.46123028e+00
   1.27913439e+00  1.44961417e+00  1.35707390e+00  6.99695349e-01
   1.43370974e+00  2.13140893e+00  1.42216420e+00]
 [ 9.65882838e-01  7.23807216e-01  5.70405483e-01  1.33451152e+00
   2.84557366e+00  7.56008089e-01  2.14279723e+00  1.62887824e+00
   2.73068404e+00  9.80936646e-01  1.54836118e+00  1.30328822e+00
   1.34119833e+00  1.38133073e+00  1.27243876e+00  5.24638236e-01
   1.43529761e+00  1.95529199e+00  1.19710946e+00]
 [ 9.90763783e-01  6.23671472e-01  5.28777778e-01  1.22634995e+00
   2.72213030e+00  6.29221022e-01  2.22570419e+00  1.61505389e+00
   2.63120151e+00  8.55429053e-01  1.51329672e+00  1.05248189e+00
   1.38650334e+00  1.27771819e+00  1.21806777e+00  3.17523152e-01
   1.48980641e+00  1.94584382e+00  1.01581645e+00]
 [ 7.46988297e-01  3.58040124e-01  6.07544541e-01  1.14599681e+00
   2.26052809e+00  2.81283319e-01  2.10535049e+00  1.35301554e+00
   2.07234526e+00  6.14569485e-01  1.12006581e+00  5.70439994e-01
   1.43328750e+00  1.02691209e+00  9.95221198e-01 -4.23747674e-03
   1.08016062e+00  1.25024426e+00  5.80727816e-01]
 [ 5.94438970e-01  5.19122295e-02  6.58553958e-01  9.79331851e-01
   1.53306413e+00  1.34810582e-01  1.90141988e+00  1.27824974e+00
   1.58237326e+00  4.15239573e-01  7.35039175e-01  3.00828427e-01
   1.44034982e+00  8.22646260e-01  7.99618602e-01 -1.30108461e-01
   7.30684459e-01  8.15941215e-01  3.87786776e-01]
 [ 4.73440230e-01 -2.55335420e-02  6.95325434e-01  8.61603677e-01
   9.83722866e-01  1.26710072e-01  1.57418203e+00  1.22748244e+00
   1.13784134e+00  3.57678860e-01  4.99368191e-01  2.71099061e-01
   1.25059843e+00  7.09991217e-01  5.75945020e-01 -9.76538658e-02
   4.88127619e-01  6.37814045e-01  3.74074787e-01]] (3388, 48, 19)
24/24 [==============================] - 0s 11ms/step
[[0.40084422 0.4398781  0.33819342 0.47150353 0.61068225 0.53320986
  0.6349968  0.6051528  0.50965536 0.41354707 0.46022215 0.39518866
  0.24762553 0.44073376 0.42997533 0.2522473  0.3796214  0.47630817
  0.42299104]
 [0.49053353 0.46749994 0.4110492  0.554529   0.7054503  0.5814145
  0.6474726  0.71177435 0.5578153  0.43679824 0.49415264 0.43271363
  0.33261105 0.47891486 0.4688264  0.3628291  0.50816715 0.5590523
  0.5607503 ]
 [0.5455011  0.4586098  0.4503538  0.5864297  0.6832736  0.6278416
  0.5910687  0.7690283  0.5851332  0.45286605 0.50030684 0.48591673
  0.3937313  0.5053266  0.5169943  0.4469682  0.60976607 0.5947933
  0.6793281 ]
 [0.62893224 0.53368556 0.4882483  0.65363914 0.82063985 0.6725058
  0.61637944 0.8840844  0.62695843 0.50007224 0.5597957  0.4844611
  0.43352896 0.5172188  0.5609496  0.52957326 0.6722231  0.6096643
  0.75174826]
 [0.6236129  0.6108334  0.5905473  0.6782115  0.9477967  0.6516059
  0.7924578  0.9325586  0.6282351  0.554333   0.6327601  0.4754488
  0.58124846 0.6366271  0.52539057 0.51507396 0.632051   0.5199086
  0.7252153 ]
 [0.6226674  0.7394116  0.5287777  0.6811826  1.0418458  0.7752351
  0.84393495 0.94920033 0.620048   0.6081247  0.7327134  0.63214904
  0.5992594  0.7382027  0.48226428 0.68144983 0.5692457  0.53950524
  0.849382  ]
 [0.9110009  1.0482255  0.33364302 0.80044025 1.3011076  1.1977696
  0.91024655 1.4094946  0.9115565  0.82613444 0.9472922  1.121433
  0.73245394 0.93308103 0.917348   1.2160169  0.9945995  0.8535418
  1.2812662 ]
 [1.1772422  1.1323125  0.5123294  0.97223496 1.5178894  1.5838683
  1.0750386  1.7695045  1.5485054  1.0052742  1.1156104  1.5495932
  1.0601311  1.1040705  1.3459216  1.5449629  1.4505277  1.0450814
  1.6144729 ]
 [1.2159986  1.2811891  0.8720426  1.2860811  1.6304647  1.5877573
  1.3361012  2.0174472  1.9236264  1.3934207  1.1191292  1.8678333
  1.1524208  1.4999071  1.7930237  1.5094262  1.6144503  1.1983043
  1.590518  ]
 [1.1504893  1.3361517  1.0799308  1.2935654  1.7555698  1.2845782
  1.3519881  1.8435786  2.2687745  1.5219367  1.1277995  1.8285898
  1.2440882  1.6458496  1.6560918  1.3122197  1.785073   0.99593127
  1.6312207 ]
 [1.0501944  1.3107003  1.1666545  1.4366397  1.777144   1.0102232
  1.3853469  1.6932846  2.3420358  1.5910835  1.2035005  1.792753
  1.1154497  1.729626   1.6093119  1.1514727  1.7698414  1.1833689
  1.4468615 ]
 [0.9344911  1.3575879  1.0511683  1.5497509  2.2067015  0.9771228
  1.4940498  1.5843561  2.2594101  1.7978168  1.4201648  1.6446863
  0.9092602  1.8328094  1.6359974  1.0752337  1.8193238  1.6399894
  1.3558022 ]
 [0.78886306 1.144458   1.0048995  1.4266307  2.2053802  0.78364456
  1.2376125  1.2906594  1.9833274  1.5950401  1.3544612  1.4607297
  0.733726   1.6097248  1.1807423  0.92176193 1.6485289  1.6721654
  1.2445959 ]
 [0.75664556 0.9748033  0.95487785 1.3459615  2.137505   0.7568173
  1.2234455  1.1538885  1.8143401  1.481271   1.3388703  1.2914027
  0.62774247 1.4410882  1.0486935  0.7447779  1.4800164  1.676046
  1.1455863 ]
 [0.78223944 0.9052825  0.87578636 1.3006122  2.0453575  0.7761947
  1.1266491  1.1375638  1.8032776  1.2807305  1.3326372  1.1670465
  0.63317287 1.2536482  0.9803493  0.66038144 1.4924377  1.6859347
  1.207531  ]
 [0.6911386  0.7469807  0.79475594 1.1255469  1.6225371  0.63831514
  0.9881367  1.0502163  1.4481725  0.88210344 1.1423441  0.9696048
  0.6701678  0.953086   0.8131439  0.5844276  1.2335211  1.2563975
  1.0967473 ]
 [0.5275333  0.54164374 0.899287   1.0323161  1.2226193  0.58435696
  0.9186138  0.83725655 1.1361204  0.85271007 0.92659765 0.74876434
  0.6764612  0.84374523 0.7221898  0.41617128 0.90708697 0.834925
  0.9570125 ]
 [0.39386415 0.38081267 0.8953239  0.92313063 0.8858581  0.46679336
  0.7987353  0.6584957  0.8724586  0.7778847  0.75034666 0.59962904
  0.64639795 0.66431534 0.55963606 0.24281897 0.6374806  0.64297855
  0.7809    ]
 [0.35744223 0.26507398 0.8244294  0.8106418  0.69138354 0.3805048
  0.7584757  0.55264133 0.70997286 0.6589957  0.63721573 0.41984785
  0.6217182  0.5297593  0.43231878 0.14809881 0.48352483 0.46568242
  0.6743014 ]
 [0.30688792 0.20542097 0.74487954 0.71474254 0.54621726 0.28970197
  0.6922722  0.47937614 0.5725384  0.5609982  0.54615843 0.28298318
  0.621149   0.4227542  0.34105104 0.0638629  0.3791697  0.35139608
  0.5768831 ]
 [0.30079767 0.16659331 0.64096135 0.62556857 0.44246402 0.22553158
  0.62611914 0.4502235  0.47393054 0.45280173 0.4801857  0.20588964
  0.5983654  0.33898717 0.2591955  0.06304616 0.2929145  0.26944003
  0.52266777]
 [0.3011405  0.15698704 0.5346322  0.53378975 0.37571812 0.16470778
  0.5849917  0.46200323 0.41003203 0.36986378 0.4301589  0.15462889
  0.5865018  0.2863231  0.22455128 0.06994572 0.25374237 0.22485647
  0.4725709 ]
 [0.28923196 0.16964754 0.45583242 0.46596575 0.33516246 0.11577552
  0.53094167 0.46976018 0.35098666 0.29948583 0.38598734 0.12864509
  0.5484831  0.2497236  0.1946516  0.08428597 0.22073962 0.21248272
  0.42770007]
 [0.28719032 0.17581886 0.38641518 0.40709952 0.30374327 0.08618817
  0.5018574  0.49498147 0.30880645 0.25195464 0.3492296  0.1088863
  0.5362555  0.2298967  0.19329794 0.09678126 0.1990342  0.19598284
  0.39684558]
 [0.29188114 0.19524518 0.33959043 0.3689262  0.3028031  0.08151348
  0.47539204 0.52313876 0.2822215  0.2221507  0.3283444  0.11029016
  0.5134978  0.22227208 0.19763847 0.11945693 0.18951996 0.20420578
  0.38340852]
 [0.31265044 0.21341813 0.312841   0.3433636  0.31266314 0.08231186
  0.48024836 0.5560029  0.26373404 0.21067029 0.31372774 0.11866045
  0.5119658  0.2317451  0.21664795 0.16701631 0.21455851 0.21697792
  0.37651017]
 [0.29835063 0.22280857 0.27789265 0.30913863 0.2955205  0.05597311
  0.45931926 0.56491446 0.24289739 0.18453802 0.2896342  0.1011111
  0.48554328 0.22290574 0.21049877 0.16259234 0.19412854 0.20914814
  0.34376642]
 [0.31064633 0.23772976 0.24969739 0.28124687 0.28703085 0.05524461
  0.45483714 0.59526694 0.23946142 0.16996086 0.2818596  0.11017811
  0.48716947 0.22799714 0.23085894 0.18278272 0.2073178  0.2223981
  0.34252378]
 [0.28918976 0.24753857 0.2297677  0.24579315 0.27142006 0.02985682
  0.44316941 0.5855147  0.19747731 0.15165712 0.2610423  0.09243278
  0.46448332 0.21254818 0.21907261 0.18312657 0.1934627  0.2007178
  0.29547235]
 [0.28066808 0.25753325 0.20815766 0.21744847 0.25970837 0.01552516
  0.4361911  0.5922069  0.16363043 0.14210054 0.2464796  0.08480156
  0.4546052  0.20766473 0.21698199 0.19700095 0.18303688 0.18401176
  0.27297264]
 [0.2686465  0.2773124  0.1866841  0.18847059 0.25386977 0.01104197
  0.432971   0.59130216 0.14367503 0.14145447 0.23814529 0.08435491
  0.430946   0.21037897 0.2243143  0.21275528 0.18854238 0.16944596
  0.2486107 ]
 [0.33484146 0.3131794  0.16625756 0.1993026  0.31107768 0.08213875
  0.50160736 0.68195367 0.14492738 0.19778064 0.29158935 0.15191028
  0.43409535 0.22524133 0.27405548 0.3040468  0.24520352 0.21883139
  0.25351518]
 [0.5167642  0.48765698 0.21407032 0.29927114 0.66182226 0.28754485
  0.6919302  0.95119685 0.14317906 0.31744626 0.48614386 0.30145139
  0.4511958  0.22544341 0.43175447 0.5978375  0.35137537 0.34867355
  0.31698886]
 [0.72693306 0.6920147  0.43163496 0.4704279  1.1430533  0.51575536
  0.93913287 1.1372973  0.18163109 0.47392103 0.7437126  0.40237716
  0.5306781  0.27675956 0.5125984  0.92992777 0.42985982 0.38340122
  0.41838723]
 [0.82192373 0.9321175  0.59548146 0.52929807 1.2305624  0.6379717
  1.009449   1.333882   0.29090384 0.4839193  0.7566965  0.5787363
  0.5077657  0.3242645  0.58933663 1.1574533  0.5415812  0.3357962
  0.5505753 ]
 [0.98457056 1.4115139  0.7727941  0.82244754 1.493954   0.99542904
  1.237933   1.7944577  0.5670996  0.7357085  0.9283719  1.0344983
  0.55519915 0.59288716 1.017015   1.5257128  0.6857109  0.55336547
  0.871127  ]
 [0.8125569  1.2958685  1.0356808  0.934449   1.7271688  1.002998
  1.3714169  1.5258062  0.49264413 0.87095886 1.059889   1.0734946
  0.5796263  0.88302267 1.0018958  1.4258864  0.8867657  0.7395526
  0.7620062 ]
 [0.5520773  1.0000635  1.1923276  0.70291805 1.2569455  0.68864584
  0.98505044 0.879265   0.40848997 0.7769787  0.8732887  0.68120736
  0.4726317  0.7725147  0.5509127  0.8043751  0.74808466 0.62649465
  0.68784595]
 [0.402467   0.87832814 1.2534065  0.70350325 1.0546851  0.62618303
  0.81633526 0.64052933 0.30076712 0.86731905 0.7231899  0.4335058
  0.36110705 0.78264296 0.44417068 0.50754446 0.52985126 0.5415767
  0.64338005]
 [0.3471875  0.7809768  1.1644008  0.6827121  0.8824986  0.5665396
  0.64285076 0.57777065 0.21122056 0.8839323  0.625216   0.29520667
  0.35471687 0.78220165 0.40270987 0.35683703 0.30160043 0.55936664
  0.6391131 ]
 [0.43888727 0.68808436 1.0377352  0.76817983 0.8041239  0.66106933
  0.58017844 0.6346542  0.28233555 0.9180571  0.5659813  0.31376198
  0.37424082 0.8432933  0.47541532 0.42441633 0.26428637 0.62079877
  0.79365724]
 [0.43654665 0.6153088  0.97589636 0.74133044 0.66555184 0.6044793
  0.46144292 0.5634508  0.32592836 0.921371   0.48751304 0.2793627
  0.3725899  0.80471694 0.4043001  0.38878897 0.23593947 0.5442698
  0.7824971 ]
 [0.43019712 0.5140115  0.89588517 0.6936922  0.53631544 0.5566436
  0.40326384 0.47421807 0.40581566 0.91097766 0.46454415 0.21434721
  0.42092738 0.7659024  0.38326758 0.33256808 0.2750392  0.4511749
  0.76657116]
 [0.3776522  0.41881546 0.8383345  0.60584444 0.39994946 0.4666286
  0.35083166 0.3966208  0.40062398 0.8453499  0.4238887  0.13799766
  0.46779287 0.6550472  0.34631327 0.27366564 0.36302605 0.33916956
  0.6337478 ]
 [0.38732237 0.39635795 0.7416236  0.6116912  0.36878213 0.48903158
  0.37383524 0.4027002  0.44567844 0.768577   0.43251792 0.13456027
  0.47944003 0.60365    0.36426982 0.30270463 0.45090178 0.3605229
  0.6034137 ]
 [0.4087745  0.4116873  0.69184285 0.65201217 0.39524585 0.5509681
  0.39795884 0.4502889  0.53862226 0.7617741  0.42605004 0.1977507
  0.48184118 0.6532613  0.42095745 0.3966725  0.5389572  0.3994529
  0.64524126]
 [0.42650643 0.40956116 0.62685174 0.6601527  0.38912416 0.5438193
  0.39087126 0.4613384  0.6021308  0.6976314  0.4446613  0.21874061
  0.5042703  0.61503255 0.42614734 0.40281925 0.57936496 0.43921545
  0.67357934]
 [0.43127003 0.41247562 0.5699715  0.664134   0.38601628 0.5836688
  0.3800128  0.47551966 0.6188444  0.6513263  0.48094755 0.293672
  0.52497303 0.5455427  0.4488543  0.41048035 0.6171345  0.46208394
  0.69304556]] (740, 48, 19)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.62 RMSE
Test Score: 0.46 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C4[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(200)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:200,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C4[:200,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
###########SUMMER
km_4_SU = KMeans(4).fit(X_SU)
pd.Series.value_counts(km_4_SU.labels_)
Out[ ]:
1    157
0    103
3     38
2      1
dtype: int64
In [ ]:
X_SU['cluster'] = km_4_SU.labels_
In [ ]:
X_SU_C1 = X_SU[X_SU.cluster == 0]
X_SU_C2 = X_SU[X_SU.cluster == 1]
X_SU_C3 = X_SU[X_SU.cluster == 2]
X_SU_C4 = X_SU[X_SU.cluster == 3]
#X_A_C5 = X_A[X_A.cluster == 4]
#X_A_C6 = X_A[X_A.cluster == 5]
In [ ]:
X_SU_C1 = X_SU_C1.drop(columns = 'cluster')
X_SU_C1 = X_SU_C1.transpose()
X_SU_C1.head()
Out[ ]:
1 3 4 5 7 9 10 13 14 15 18 19 22 23 26 27 28 29 30 31 32 34 36 37 39 40 41 43 44 45 48 50 52 53 60 64 66 71 72 73 ... 227 231 232 233 235 236 237 238 239 240 241 244 245 247 252 258 259 260 261 263 264 266 268 269 270 272 273 277 278 280 282 283 284 288 291 292 295 296 297 298
Datetime
2012-12-01 00:00:00 0.202 0.071 0.260 0.118 0.147 0.071 0.187 0.163 0.158 0.085 0.084 0.144 0.078 0.127 0.350 0.270 0.106 0.159 0.014 0.078 0.071 0.256 0.139 0.424 0.127 0.066 0.282 0.106 0.108 0.124 0.113 0.063 1.874 0.051 0.077 0.088 0.088 0.085 0.056 0.205 ... 0.196 0.207 0.500 0.098 0.141 0.131 0.138 0.237 0.149 0.066 0.114 0.256 0.103 0.232 0.098 0.139 0.091 2.389 0.199 0.194 0.055 0.581 0.182 1.163 0.224 0.110 0.047 0.110 0.167 0.273 0.131 0.062 0.083 0.210 0.401 0.094 0.199 0.064 0.268 0.116
2012-12-01 00:30:00 0.151 0.051 0.288 0.121 0.089 0.065 0.121 0.155 0.173 0.041 0.113 0.125 0.075 0.117 0.813 0.260 0.113 0.135 0.067 0.077 0.078 0.219 0.122 0.289 0.124 0.150 0.264 0.123 0.154 0.073 0.175 0.013 0.804 0.084 0.105 0.051 0.075 0.069 0.019 0.200 ... 0.174 0.155 0.469 0.543 0.123 0.125 0.050 0.225 0.148 0.056 0.123 0.275 0.078 0.213 0.184 0.125 0.088 2.406 0.399 0.170 0.046 0.044 0.148 1.164 0.231 0.087 0.103 0.130 0.186 0.180 0.088 0.051 0.123 0.073 0.431 0.056 0.237 0.050 0.309 0.115
2012-12-01 01:00:00 0.254 0.054 0.254 0.143 0.099 0.102 0.199 0.107 0.200 0.086 0.082 0.181 0.077 0.173 0.363 0.221 0.113 0.106 0.014 0.078 0.058 0.244 0.127 0.236 0.122 0.162 0.530 0.069 0.126 0.083 0.150 0.069 0.069 0.090 0.078 0.078 0.075 0.054 0.050 0.205 ... 0.062 0.143 0.456 0.587 0.108 0.138 0.138 0.344 0.202 0.073 0.123 0.256 0.087 0.147 0.182 0.113 0.087 2.383 1.280 0.168 0.044 0.056 0.173 1.263 0.219 0.093 0.130 0.209 0.182 0.179 0.094 0.073 0.077 0.070 0.404 0.094 0.160 0.049 0.265 0.120
2012-12-01 01:30:00 0.250 0.050 0.259 0.131 0.066 0.034 0.121 0.052 0.147 0.107 0.108 0.169 0.076 0.123 0.313 0.185 0.100 0.057 0.060 0.079 0.063 0.181 0.124 0.232 0.120 0.079 0.871 0.117 0.119 0.126 0.150 0.025 0.060 0.059 0.052 0.072 0.088 0.117 0.025 0.197 ... 0.054 0.180 0.444 0.120 0.118 0.138 0.050 0.250 0.159 0.170 0.111 0.213 0.061 0.178 0.173 0.288 0.088 2.397 0.924 0.167 0.054 0.063 0.159 1.175 0.212 0.101 0.069 0.169 0.162 0.156 0.156 0.041 0.128 0.454 0.319 0.050 0.173 0.060 0.264 0.128
2012-12-01 02:00:00 0.149 0.053 0.167 0.114 0.109 0.075 0.186 0.051 0.153 0.061 0.088 0.113 0.075 0.142 0.263 0.248 0.113 0.133 0.026 0.071 0.078 0.206 0.094 0.261 0.121 0.070 0.906 0.090 0.156 0.079 0.294 0.050 0.060 0.076 0.087 0.051 0.088 0.116 0.056 0.193 ... 0.095 0.126 0.263 0.094 0.114 0.138 0.131 0.231 0.132 0.159 0.115 0.206 0.104 0.176 0.273 0.138 0.056 2.412 0.364 0.166 0.042 0.544 0.107 1.189 0.220 0.099 0.056 0.186 0.144 0.156 0.113 0.082 0.069 0.076 0.282 0.088 0.134 0.060 0.292 0.105

5 rows × 167 columns

In [ ]:
X_SU_C1 = X_SU_C1.values
cap = np.percentile(X_SU_C1, 97)   
X_SU_C1[X_SU_C1 > cap] = cap
###########
#scaler = MinMaxScaler(feature_range=(0, 1))
#Xtrain = scaler.fit(Xtrain.reshape(0, 1))
#X_A_C1 = scaler.fit_transform(X_A_C1)
In [ ]:
training_size=int(X_SU_C1.shape[0]*0.80)

test_size=(X_SU_C1.shape[0])-training_size

train,test=X_SU_C1[0:training_size],X_SU_C1[training_size:(X_SU_C1.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3312, 48, 167) (3312, 48, 167)
(720, 48, 167) (720, 48, 167)
In [ ]:
                             ###Building a sequential network:
Model_1 = models.Sequential()
Model_1.add(layers.Dense(600, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_1.add(Dropout(0.2))
Model_1.add(BatchNormalization())
Model_1.add(layers.Dense(300, activation='relu'))
Model_1.add(Dropout(0.2))
Model_1.add(BatchNormalization())

Model_1.add((Dense(trainX.shape[2])))
Model_1.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_1.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense (Dense)                (None, 48, 600)           100800    
_________________________________________________________________
dropout (Dropout)            (None, 48, 600)           0         
_________________________________________________________________
batch_normalization (BatchNo (None, 48, 600)           2400      
_________________________________________________________________
dense_1 (Dense)              (None, 48, 300)           180300    
_________________________________________________________________
dropout_1 (Dropout)          (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_1 (Batch (None, 48, 300)           1200      
_________________________________________________________________
dense_2 (Dense)              (None, 48, 167)           50267     
=================================================================
Total params: 334,967
Trainable params: 333,167
Non-trainable params: 1,800
_________________________________________________________________
In [ ]:
model_train = Model_1.fit(trainX,trainY, epochs=30, batch_size = 32, validation_split = 0.10)
Epoch 1/30
94/94 [==============================] - 1s 9ms/step - loss: 0.3190 - mae: 0.3786 - val_loss: 0.0203 - val_mae: 0.0986
Epoch 2/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0667 - mae: 0.1584 - val_loss: 0.0197 - val_mae: 0.0982
Epoch 3/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0335 - mae: 0.1155 - val_loss: 0.0190 - val_mae: 0.0956
Epoch 4/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0231 - mae: 0.0984 - val_loss: 0.0180 - val_mae: 0.0926
Epoch 5/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0196 - mae: 0.0914 - val_loss: 0.0173 - val_mae: 0.0889
Epoch 6/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0182 - mae: 0.0882 - val_loss: 0.0167 - val_mae: 0.0847
Epoch 7/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0174 - mae: 0.0864 - val_loss: 0.0165 - val_mae: 0.0826
Epoch 8/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0169 - mae: 0.0852 - val_loss: 0.0164 - val_mae: 0.0815
Epoch 9/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0164 - mae: 0.0840 - val_loss: 0.0165 - val_mae: 0.0832
Epoch 10/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0159 - mae: 0.0829 - val_loss: 0.0164 - val_mae: 0.0828
Epoch 11/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0154 - mae: 0.0817 - val_loss: 0.0166 - val_mae: 0.0839
Epoch 12/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0149 - mae: 0.0805 - val_loss: 0.0164 - val_mae: 0.0826
Epoch 13/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0145 - mae: 0.0797 - val_loss: 0.0165 - val_mae: 0.0842
Epoch 14/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0140 - mae: 0.0784 - val_loss: 0.0168 - val_mae: 0.0836
Epoch 15/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0136 - mae: 0.0775 - val_loss: 0.0168 - val_mae: 0.0852
Epoch 16/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0133 - mae: 0.0767 - val_loss: 0.0169 - val_mae: 0.0859
Epoch 17/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0129 - mae: 0.0759 - val_loss: 0.0169 - val_mae: 0.0847
Epoch 18/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0127 - mae: 0.0753 - val_loss: 0.0171 - val_mae: 0.0854
Epoch 19/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0124 - mae: 0.0745 - val_loss: 0.0172 - val_mae: 0.0858
Epoch 20/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0121 - mae: 0.0738 - val_loss: 0.0171 - val_mae: 0.0856
Epoch 21/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0119 - mae: 0.0733 - val_loss: 0.0174 - val_mae: 0.0861
Epoch 22/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0117 - mae: 0.0730 - val_loss: 0.0172 - val_mae: 0.0856
Epoch 23/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0115 - mae: 0.0725 - val_loss: 0.0174 - val_mae: 0.0865
Epoch 24/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0113 - mae: 0.0720 - val_loss: 0.0173 - val_mae: 0.0861
Epoch 25/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0112 - mae: 0.0717 - val_loss: 0.0173 - val_mae: 0.0863
Epoch 26/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0110 - mae: 0.0714 - val_loss: 0.0174 - val_mae: 0.0853
Epoch 27/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0109 - mae: 0.0711 - val_loss: 0.0175 - val_mae: 0.0868
Epoch 28/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0108 - mae: 0.0707 - val_loss: 0.0175 - val_mae: 0.0869
Epoch 29/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0106 - mae: 0.0704 - val_loss: 0.0176 - val_mae: 0.0862
Epoch 30/30
94/94 [==============================] - 1s 6ms/step - loss: 0.0106 - mae: 0.0703 - val_loss: 0.0176 - val_mae: 0.0868
In [ ]:
Seq_train = Model_1.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_1.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
104/104 [==============================] - 0s 4ms/step
[[0.21481101 0.08356421 0.14668901 ... 0.09612989 0.1985003  0.12148587]
 [0.14780605 0.08863688 0.10953311 ... 0.06301603 0.20056973 0.12045699]
 [0.15163323 0.09782784 0.09334334 ... 0.04542028 0.21368596 0.10479145]
 ...
 [0.63747096 0.16284882 0.14563535 ... 0.10632606 0.19450742 0.16728002]
 [0.6865454  0.11476872 0.1172314  ... 0.110745   0.19165677 0.17684108]
 [0.18456294 0.11323586 0.1546417  ... 0.06125081 0.22535042 0.12680292]] (3312, 48, 167)
23/23 [==============================] - 0s 3ms/step
[[0.20824313 0.0881248  0.16722277 ... 0.05150552 0.23320223 0.10941558]
 [0.24737324 0.06702063 0.12564287 ... 0.01652116 0.20166954 0.11052865]
 [0.20411746 0.05729585 0.10929409 ... 0.04306439 0.18231897 0.09980038]
 ...
 [0.5677004  0.11195406 0.17107269 ... 0.06836107 0.1695418  0.16290882]
 [0.69346493 0.10954937 0.2017706  ... 0.06626639 0.21645893 0.11411975]
 [0.33962005 0.09136781 0.16880707 ... 0.06860206 0.1682523  0.10323023]] (720, 48, 167)
In [ ]:
testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))

testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Test Score: 0.09 MAE
Test Score: 0.14 RMSE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,:1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(84))
#model.add(Dropout(.2))
#model.add(LSTM(100, activation='relu', return_sequences = True))
#model.add(Dropout(.01))

model.add(Dense(trainX.shape[2]))
#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm (LSTM)                  (None, 48, 200)           294400    
_________________________________________________________________
dense_3 (Dense)              (None, 48, 167)           33567     
=================================================================
Total params: 327,967
Trainable params: 327,967
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_SU_C1 = model.fit(trainX,trainY, epochs=30, validation_split = 0.05, 
                          batch_size = 64, 
                         callbacks=[lr_decay])
Epoch 1/30
50/50 [==============================] - 4s 77ms/step - loss: 0.0258 - mae: 0.1073 - val_loss: 0.0184 - val_mae: 0.0876
Epoch 2/30
50/50 [==============================] - 4s 71ms/step - loss: 0.0197 - mae: 0.0927 - val_loss: 0.0171 - val_mae: 0.0841
Epoch 3/30
50/50 [==============================] - 4s 72ms/step - loss: 0.0182 - mae: 0.0883 - val_loss: 0.0167 - val_mae: 0.0835
Epoch 4/30
50/50 [==============================] - 4s 74ms/step - loss: 0.0174 - mae: 0.0862 - val_loss: 0.0166 - val_mae: 0.0840
Epoch 5/30
50/50 [==============================] - 4s 76ms/step - loss: 0.0169 - mae: 0.0849 - val_loss: 0.0165 - val_mae: 0.0836
Epoch 6/30
50/50 [==============================] - 4s 79ms/step - loss: 0.0165 - mae: 0.0841 - val_loss: 0.0164 - val_mae: 0.0833
Epoch 7/30
50/50 [==============================] - 4s 72ms/step - loss: 0.0163 - mae: 0.0834 - val_loss: 0.0164 - val_mae: 0.0833
Epoch 8/30
50/50 [==============================] - 4s 73ms/step - loss: 0.0161 - mae: 0.0829 - val_loss: 0.0164 - val_mae: 0.0833
Epoch 9/30
50/50 [==============================] - 4s 72ms/step - loss: 0.0160 - mae: 0.0825 - val_loss: 0.0164 - val_mae: 0.0835
Epoch 10/30
50/50 [==============================] - 4s 74ms/step - loss: 0.0158 - mae: 0.0822 - val_loss: 0.0163 - val_mae: 0.0829
Epoch 11/30
50/50 [==============================] - 4s 75ms/step - loss: 0.0158 - mae: 0.0820 - val_loss: 0.0163 - val_mae: 0.0833
Epoch 12/30
50/50 [==============================] - 4s 74ms/step - loss: 0.0157 - mae: 0.0818 - val_loss: 0.0163 - val_mae: 0.0833
Epoch 13/30
50/50 [==============================] - 4s 74ms/step - loss: 0.0156 - mae: 0.0816 - val_loss: 0.0163 - val_mae: 0.0831
Epoch 14/30
50/50 [==============================] - 4s 74ms/step - loss: 0.0156 - mae: 0.0815 - val_loss: 0.0163 - val_mae: 0.0829
Epoch 15/30
50/50 [==============================] - 4s 71ms/step - loss: 0.0155 - mae: 0.0814 - val_loss: 0.0163 - val_mae: 0.0833
Epoch 16/30
50/50 [==============================] - 4s 71ms/step - loss: 0.0155 - mae: 0.0813 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 17/30
50/50 [==============================] - 4s 74ms/step - loss: 0.0155 - mae: 0.0813 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 18/30
50/50 [==============================] - 4s 71ms/step - loss: 0.0155 - mae: 0.0812 - val_loss: 0.0163 - val_mae: 0.0833
Epoch 19/30
50/50 [==============================] - 4s 73ms/step - loss: 0.0154 - mae: 0.0812 - val_loss: 0.0163 - val_mae: 0.0833
Epoch 20/30
50/50 [==============================] - 4s 72ms/step - loss: 0.0154 - mae: 0.0812 - val_loss: 0.0163 - val_mae: 0.0833
Epoch 21/30
50/50 [==============================] - 4s 72ms/step - loss: 0.0154 - mae: 0.0811 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 22/30
50/50 [==============================] - 4s 73ms/step - loss: 0.0154 - mae: 0.0811 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 23/30
50/50 [==============================] - 4s 75ms/step - loss: 0.0154 - mae: 0.0811 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 24/30
50/50 [==============================] - 4s 73ms/step - loss: 0.0154 - mae: 0.0810 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 25/30
50/50 [==============================] - 4s 71ms/step - loss: 0.0154 - mae: 0.0810 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 26/30
50/50 [==============================] - 4s 73ms/step - loss: 0.0154 - mae: 0.0810 - val_loss: 0.0163 - val_mae: 0.0831
Epoch 27/30
50/50 [==============================] - 4s 71ms/step - loss: 0.0154 - mae: 0.0810 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 28/30
50/50 [==============================] - 4s 74ms/step - loss: 0.0154 - mae: 0.0810 - val_loss: 0.0163 - val_mae: 0.0833
Epoch 29/30
50/50 [==============================] - 4s 74ms/step - loss: 0.0154 - mae: 0.0810 - val_loss: 0.0163 - val_mae: 0.0832
Epoch 30/30
50/50 [==============================] - 4s 71ms/step - loss: 0.0154 - mae: 0.0810 - val_loss: 0.0163 - val_mae: 0.0832
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_SU_C1.history['loss'], label='train')
plt.plot(history_SU_C1.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C1 = model.predict(trainX, verbose = 1)
print(testingtrain_C1[0], testingtrain_C1.shape)

testingtest_C1 = model.predict(testX, verbose = 1)
print(testingtest_C1[0], testingtest_C1.shape)
104/104 [==============================] - 1s 9ms/step
[[0.17370307 0.12107642 0.14646214 ... 0.08570936 0.23780207 0.10867125]
 [0.20022184 0.11016898 0.12948492 ... 0.10406443 0.25904483 0.10832312]
 [0.25332412 0.10811915 0.09947018 ... 0.11606492 0.25271112 0.09329774]
 ...
 [0.56087375 0.11861707 0.02572009 ... 0.08887669 0.28267318 0.18779086]
 [0.5400041  0.12358303 0.04599467 ... 0.05143996 0.271454   0.12263075]
 [0.31910166 0.12016252 0.08328786 ... 0.06032312 0.25301188 0.10886428]] (3312, 48, 167)
23/23 [==============================] - 0s 8ms/step
[[0.14495568 0.08740523 0.13959606 ... 0.06051768 0.17653866 0.0830391 ]
 [0.15072265 0.10086653 0.1516912  ... 0.07471158 0.19986656 0.07630885]
 [0.17813626 0.08857474 0.13622479 ... 0.06492087 0.18611033 0.07423795]
 ...
 [0.5122412  0.10693597 0.13099277 ... 0.03965596 0.20452552 0.1455587 ]
 [0.4255342  0.1175542  0.12808159 ... 0.0382842  0.21766718 0.11035167]
 [0.23793384 0.12045196 0.10899072 ... 0.03065249 0.2249241  0.07697612]] (720, 48, 167)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.13 RMSE
Test Score: 0.13 RMSE
Train Score: 0.09 MAE
Test Score: 0.09 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(testingtest_C1[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_C1[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
####################
X_SU_C2 = X_SU_C2.drop(columns = 'cluster')
X_SU_C2 = X_SU_C2.transpose()
X_SU_C2.head()
Out[ ]:
17 33 35 55 57 82 91 96 114 122 129 138 141 143 145 152 159 191 203 207 228 253 254 255 279 293
Datetime
2012-12-01 00:00:00 0.705 0.546 0.911 0.935 0.625 2.041 0.248 0.308 0.202 0.250 0.203 1.729 1.861 0.212 0.772 0.350 0.373 0.831 0.451 0.411 0.723 0.975 1.132 0.375 0.113 0.240
2012-12-01 00:30:00 0.605 0.879 0.809 0.737 0.213 1.226 0.246 0.341 0.126 0.246 0.095 1.886 1.272 0.226 0.825 0.350 0.973 0.763 0.105 0.313 0.625 1.326 1.078 0.374 0.125 0.238
2012-12-01 01:00:00 0.724 0.893 0.484 0.636 0.500 0.150 0.179 0.365 0.115 0.255 0.131 1.938 1.195 0.236 0.779 0.288 0.178 0.744 0.385 0.747 0.538 1.344 1.088 0.395 0.094 0.211
2012-12-01 01:30:00 0.555 1.464 0.290 0.242 0.475 0.137 0.128 0.272 0.774 0.251 0.085 1.889 1.339 0.217 0.859 0.288 0.243 0.738 0.102 0.310 0.539 1.045 0.481 0.397 0.125 0.537
2012-12-01 02:00:00 0.686 0.220 0.208 0.248 0.213 0.153 0.063 0.349 0.084 0.241 0.134 1.872 1.344 0.253 0.808 0.300 0.160 0.594 0.106 0.328 0.494 0.925 0.143 0.376 0.113 0.837
In [ ]:
X_SU_C2 = X_SU_C2.values
cap = np.percentile(X_SU_C2, 97)   
X_SU_C2[X_SU_C2 > cap] = cap
In [ ]:
training_size=int(X_SU_C2.shape[0]*0.80)

test_size=(X_SU_C2.shape[0])-training_size

train,test=X_SU_C2[0:training_size],X_SU_C2[training_size:(X_SU_C2.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3312, 48, 26) (3312, 48, 26) 
 (720, 48, 26) (720, 48, 26)
In [ ]:
                             ###Building a sequential network:
Model_2 = models.Sequential()
Model_2.add(layers.Dense(1500, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_2.add(Dropout(0.5))
Model_2.add(BatchNormalization())


Model_2.add(layers.Dense(50, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_2.add(Dropout(0.5))
Model_2.add(BatchNormalization())

Model_2.add((Dense(trainX.shape[2])))
Model_2.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_2.summary()
Model: "sequential_2"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_4 (Dense)              (None, 48, 1500)          40500     
_________________________________________________________________
dropout_2 (Dropout)          (None, 48, 1500)          0         
_________________________________________________________________
batch_normalization_2 (Batch (None, 48, 1500)          6000      
_________________________________________________________________
dense_5 (Dense)              (None, 48, 50)            75050     
_________________________________________________________________
dropout_3 (Dropout)          (None, 48, 50)            0         
_________________________________________________________________
batch_normalization_3 (Batch (None, 48, 50)            200       
_________________________________________________________________
dense_6 (Dense)              (None, 48, 26)            1326      
=================================================================
Total params: 123,076
Trainable params: 119,976
Non-trainable params: 3,100
_________________________________________________________________
In [ ]:
model_train = Model_2.fit(trainX,trainY, epochs=50, validation_split = 0.05, batch_size = 64)
Epoch 1/50
50/50 [==============================] - 1s 13ms/step - loss: 1.1978 - mae: 0.7770 - val_loss: 0.6328 - val_mae: 0.5223
Epoch 2/50
50/50 [==============================] - 0s 10ms/step - loss: 0.7546 - mae: 0.5945 - val_loss: 0.4758 - val_mae: 0.4296
Epoch 3/50
50/50 [==============================] - 0s 10ms/step - loss: 0.5111 - mae: 0.4769 - val_loss: 0.3853 - val_mae: 0.4133
Epoch 4/50
50/50 [==============================] - 0s 9ms/step - loss: 0.3730 - mae: 0.4124 - val_loss: 0.3721 - val_mae: 0.4157
Epoch 5/50
50/50 [==============================] - 0s 9ms/step - loss: 0.2987 - mae: 0.3730 - val_loss: 0.3687 - val_mae: 0.4123
Epoch 6/50
50/50 [==============================] - 0s 9ms/step - loss: 0.2565 - mae: 0.3488 - val_loss: 0.3680 - val_mae: 0.4047
Epoch 7/50
50/50 [==============================] - 0s 9ms/step - loss: 0.2304 - mae: 0.3309 - val_loss: 0.3729 - val_mae: 0.4006
Epoch 8/50
50/50 [==============================] - 0s 10ms/step - loss: 0.2121 - mae: 0.3171 - val_loss: 0.3707 - val_mae: 0.3955
Epoch 9/50
50/50 [==============================] - 0s 9ms/step - loss: 0.2007 - mae: 0.3074 - val_loss: 0.3681 - val_mae: 0.3904
Epoch 10/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1929 - mae: 0.3008 - val_loss: 0.3658 - val_mae: 0.3852
Epoch 11/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1865 - mae: 0.2953 - val_loss: 0.3606 - val_mae: 0.3798
Epoch 12/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1817 - mae: 0.2909 - val_loss: 0.3577 - val_mae: 0.3762
Epoch 13/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1773 - mae: 0.2870 - val_loss: 0.3551 - val_mae: 0.3713
Epoch 14/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1744 - mae: 0.2846 - val_loss: 0.3459 - val_mae: 0.3689
Epoch 15/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1719 - mae: 0.2815 - val_loss: 0.3439 - val_mae: 0.3662
Epoch 16/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1692 - mae: 0.2798 - val_loss: 0.3374 - val_mae: 0.3653
Epoch 17/50
50/50 [==============================] - 0s 10ms/step - loss: 0.1670 - mae: 0.2784 - val_loss: 0.3346 - val_mae: 0.3666
Epoch 18/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1640 - mae: 0.2757 - val_loss: 0.3411 - val_mae: 0.3642
Epoch 19/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1619 - mae: 0.2742 - val_loss: 0.3357 - val_mae: 0.3652
Epoch 20/50
50/50 [==============================] - 0s 10ms/step - loss: 0.1615 - mae: 0.2731 - val_loss: 0.3470 - val_mae: 0.3642
Epoch 21/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1594 - mae: 0.2716 - val_loss: 0.3423 - val_mae: 0.3646
Epoch 22/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1577 - mae: 0.2704 - val_loss: 0.3422 - val_mae: 0.3655
Epoch 23/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1558 - mae: 0.2691 - val_loss: 0.3388 - val_mae: 0.3671
Epoch 24/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1546 - mae: 0.2677 - val_loss: 0.3423 - val_mae: 0.3670
Epoch 25/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1536 - mae: 0.2670 - val_loss: 0.3472 - val_mae: 0.3666
Epoch 26/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1526 - mae: 0.2659 - val_loss: 0.3448 - val_mae: 0.3650
Epoch 27/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1512 - mae: 0.2652 - val_loss: 0.3372 - val_mae: 0.3665
Epoch 28/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1498 - mae: 0.2638 - val_loss: 0.3498 - val_mae: 0.3643
Epoch 29/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1491 - mae: 0.2632 - val_loss: 0.3423 - val_mae: 0.3645
Epoch 30/50
50/50 [==============================] - 0s 10ms/step - loss: 0.1488 - mae: 0.2631 - val_loss: 0.3412 - val_mae: 0.3666
Epoch 31/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1471 - mae: 0.2615 - val_loss: 0.3456 - val_mae: 0.3652
Epoch 32/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1466 - mae: 0.2607 - val_loss: 0.3474 - val_mae: 0.3668
Epoch 33/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1466 - mae: 0.2608 - val_loss: 0.3418 - val_mae: 0.3669
Epoch 34/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1448 - mae: 0.2598 - val_loss: 0.3420 - val_mae: 0.3642
Epoch 35/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1443 - mae: 0.2587 - val_loss: 0.3455 - val_mae: 0.3647
Epoch 36/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1439 - mae: 0.2585 - val_loss: 0.3506 - val_mae: 0.3650
Epoch 37/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1434 - mae: 0.2588 - val_loss: 0.3595 - val_mae: 0.3660
Epoch 38/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1429 - mae: 0.2572 - val_loss: 0.3409 - val_mae: 0.3657
Epoch 39/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1418 - mae: 0.2571 - val_loss: 0.3501 - val_mae: 0.3664
Epoch 40/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1417 - mae: 0.2565 - val_loss: 0.3407 - val_mae: 0.3662
Epoch 41/50
50/50 [==============================] - 0s 10ms/step - loss: 0.1421 - mae: 0.2576 - val_loss: 0.3530 - val_mae: 0.3669
Epoch 42/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1398 - mae: 0.2553 - val_loss: 0.3494 - val_mae: 0.3659
Epoch 43/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1399 - mae: 0.2558 - val_loss: 0.3512 - val_mae: 0.3653
Epoch 44/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1394 - mae: 0.2549 - val_loss: 0.3420 - val_mae: 0.3653
Epoch 45/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1394 - mae: 0.2541 - val_loss: 0.3461 - val_mae: 0.3689
Epoch 46/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1392 - mae: 0.2558 - val_loss: 0.3546 - val_mae: 0.3666
Epoch 47/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1391 - mae: 0.2547 - val_loss: 0.3587 - val_mae: 0.3654
Epoch 48/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1382 - mae: 0.2535 - val_loss: 0.3510 - val_mae: 0.3628
Epoch 49/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1381 - mae: 0.2540 - val_loss: 0.3471 - val_mae: 0.3679
Epoch 50/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1375 - mae: 0.2526 - val_loss: 0.3503 - val_mae: 0.3665
In [ ]:
Seq_train = Model_2.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_2.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
104/104 [==============================] - 0s 3ms/step
[[0.35792458 0.27138147 0.5171537  ... 0.566528   0.19821747 0.34954226]
 [0.34872922 0.27047595 0.45000678 ... 0.5783095  0.17239226 0.36704502]
 [0.37024724 0.25202236 0.3919329  ... 0.6036279  0.19117498 0.3535711 ]
 ...
 [0.51985145 0.4556971  0.54732394 ... 0.7137605  0.2704806  0.5452186 ]
 [0.43946213 0.37549224 0.52436525 ... 0.6427347  0.24447134 0.53599155]
 [0.3840535  0.3056467  0.45710158 ... 0.628814   0.21519312 0.35442522]] (3312, 48, 26)
23/23 [==============================] - 0s 3ms/step
[[0.46434465 0.38003448 0.4255203  ... 0.70740134 0.18513173 0.38805702]
 [0.339398   0.25405556 0.35749942 ... 0.6260194  0.13370682 0.32096386]
 [0.36945587 0.29831034 0.37982547 ... 0.6401349  0.17200397 0.35314795]
 ...
 [0.43552715 0.63939553 0.56575274 ... 0.7509628  0.33258152 0.39133236]
 [0.3882097  0.32356432 0.44894847 ... 0.7274097  0.2533331  0.32111636]
 [0.34975767 0.33980167 0.42515546 ... 0.65640944 0.1924972  0.33791468]] (720, 48, 26)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], Seq_train[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

trainMAE = np.mean(mae(trainY[:,1,:], Seq_train[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.32 RMSE
Test Score: 0.41 RMSE
Train Score: 0.22 MAE
Test Score: 0.26 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(70, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))

#model.add(LSTM(100,  activation='relu'))
model.add(Dense(trainX.shape[2]))
#model.add(Dropout(.01))

model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_2 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_4"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_2 (LSTM)                (None, 48, 70)            27160     
_________________________________________________________________
dense_8 (Dense)              (None, 48, 26)            1846      
=================================================================
Total params: 29,006
Trainable params: 29,006
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C2 = model.fit(trainX,trainY, epochs=30, validation_split = 0.05, batch_size=32, callbacks=[lr_decay])
Epoch 1/30
99/99 [==============================] - 7s 72ms/step - loss: 0.3248 - mae: 0.3897 - val_loss: 0.3309 - val_mae: 0.3948
Epoch 2/30
99/99 [==============================] - 7s 69ms/step - loss: 0.2383 - mae: 0.3382 - val_loss: 0.3252 - val_mae: 0.3863
Epoch 3/30
99/99 [==============================] - 7s 70ms/step - loss: 0.2178 - mae: 0.3210 - val_loss: 0.3359 - val_mae: 0.3824
Epoch 4/30
99/99 [==============================] - 7s 71ms/step - loss: 0.2035 - mae: 0.3103 - val_loss: 0.3415 - val_mae: 0.3814
Epoch 5/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1941 - mae: 0.3022 - val_loss: 0.3448 - val_mae: 0.3807
Epoch 6/30
99/99 [==============================] - 7s 72ms/step - loss: 0.1859 - mae: 0.2954 - val_loss: 0.3501 - val_mae: 0.3803
Epoch 7/30
99/99 [==============================] - 7s 74ms/step - loss: 0.1800 - mae: 0.2907 - val_loss: 0.3523 - val_mae: 0.3774
Epoch 8/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1760 - mae: 0.2867 - val_loss: 0.3521 - val_mae: 0.3769
Epoch 9/30
99/99 [==============================] - 7s 71ms/step - loss: 0.1731 - mae: 0.2843 - val_loss: 0.3550 - val_mae: 0.3762
Epoch 10/30
99/99 [==============================] - 7s 69ms/step - loss: 0.1709 - mae: 0.2821 - val_loss: 0.3542 - val_mae: 0.3754
Epoch 11/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1691 - mae: 0.2806 - val_loss: 0.3523 - val_mae: 0.3752
Epoch 12/30
99/99 [==============================] - 7s 71ms/step - loss: 0.1679 - mae: 0.2795 - val_loss: 0.3470 - val_mae: 0.3745
Epoch 13/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1667 - mae: 0.2785 - val_loss: 0.3550 - val_mae: 0.3746
Epoch 14/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1659 - mae: 0.2775 - val_loss: 0.3519 - val_mae: 0.3745
Epoch 15/30
99/99 [==============================] - 7s 69ms/step - loss: 0.1652 - mae: 0.2771 - val_loss: 0.3537 - val_mae: 0.3745
Epoch 16/30
99/99 [==============================] - 7s 71ms/step - loss: 0.1647 - mae: 0.2765 - val_loss: 0.3526 - val_mae: 0.3740
Epoch 17/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1642 - mae: 0.2762 - val_loss: 0.3530 - val_mae: 0.3742
Epoch 18/30
99/99 [==============================] - 7s 69ms/step - loss: 0.1639 - mae: 0.2757 - val_loss: 0.3528 - val_mae: 0.3744
Epoch 19/30
99/99 [==============================] - 7s 71ms/step - loss: 0.1636 - mae: 0.2757 - val_loss: 0.3545 - val_mae: 0.3743
Epoch 20/30
99/99 [==============================] - 7s 68ms/step - loss: 0.1634 - mae: 0.2753 - val_loss: 0.3537 - val_mae: 0.3744
Epoch 21/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1632 - mae: 0.2752 - val_loss: 0.3540 - val_mae: 0.3744
Epoch 22/30
99/99 [==============================] - 7s 72ms/step - loss: 0.1631 - mae: 0.2750 - val_loss: 0.3540 - val_mae: 0.3742
Epoch 23/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1630 - mae: 0.2749 - val_loss: 0.3532 - val_mae: 0.3744
Epoch 24/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1629 - mae: 0.2749 - val_loss: 0.3542 - val_mae: 0.3743
Epoch 25/30
99/99 [==============================] - 7s 69ms/step - loss: 0.1628 - mae: 0.2747 - val_loss: 0.3536 - val_mae: 0.3744
Epoch 26/30
99/99 [==============================] - 7s 71ms/step - loss: 0.1627 - mae: 0.2748 - val_loss: 0.3540 - val_mae: 0.3744
Epoch 27/30
99/99 [==============================] - 7s 70ms/step - loss: 0.1627 - mae: 0.2746 - val_loss: 0.3536 - val_mae: 0.3743
Epoch 28/30
99/99 [==============================] - 7s 68ms/step - loss: 0.1627 - mae: 0.2747 - val_loss: 0.3537 - val_mae: 0.3743
Epoch 29/30
99/99 [==============================] - 7s 69ms/step - loss: 0.1626 - mae: 0.2746 - val_loss: 0.3537 - val_mae: 0.3743
Epoch 30/30
99/99 [==============================] - 7s 69ms/step - loss: 0.1626 - mae: 0.2746 - val_loss: 0.3537 - val_mae: 0.3743
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C2.history['loss'], label='train')
plt.plot(history_C2.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C2 = model.predict(trainX, verbose = 1)
print(testingtrain_C2[0], testingtrain_C2.shape)

testingtest_C2 = model.predict(testX, verbose = 1)
print(testingtest_C2[0], testingtest_C2.shape)
104/104 [==============================] - 1s 7ms/step
[[0.49820995 0.33638507 0.29802364 ... 0.55832607 0.27477062 0.41769475]
 [0.643589   0.49359602 0.4200122  ... 0.79448456 0.3956798  0.61937517]
 [0.7209446  0.530654   0.4704213  ... 0.8493444  0.42935374 0.6644017 ]
 ...
 [0.76779395 0.43903095 0.40719748 ... 0.5566238  0.10882582 0.6581437 ]
 [0.58724093 0.32045364 0.3907536  ... 0.5956754  0.10199863 0.5636816 ]
 [0.51307553 0.2636911  0.2886059  ... 0.50559497 0.07264774 0.38562033]] (3312, 48, 26)
23/23 [==============================] - 0s 7ms/step
[[0.27468136 0.20221213 0.30884612 ... 0.4706277  0.24745718 0.30757934]
 [0.40281376 0.28099364 0.41873416 ... 0.63794196 0.32640788 0.4391896 ]
 [0.42021498 0.26185903 0.47040635 ... 0.72195196 0.34983334 0.49484745]
 ...
 [0.45341074 0.4521203  0.49218026 ... 0.76404023 0.21878716 0.5051712 ]
 [0.36019677 0.39289165 0.44276267 ... 0.7261033  0.17815855 0.44991973]
 [0.30684754 0.30852652 0.39736646 ... 0.68343735 0.15076089 0.40675247]] (720, 48, 26)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C2[:,1,:]))
print('Test Score: %.2f RMSE' % (trainScore))

testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C2[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.50 RMSE
Test Score: 0.41 RMSE
Train Score: 0.33 MAE
Test Score: 0.27 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C2[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_C2[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
########cluster 3
X_SU_C3 = X_SU_C3.drop(columns = 'cluster')
X_SU_C3 = X_SU_C3.transpose()
X_SU_C3.head()
Out[ ]:
6 8 11 12 16 21 24 38 42 46 47 49 51 54 56 58 59 61 62 63 65 67 68 69 70 75 79 80 84 86 89 93 95 98 100 102 103 106 107 108 ... 185 193 195 197 199 200 201 204 206 211 212 222 229 230 234 242 243 246 248 249 250 251 256 257 262 265 267 271 274 275 276 281 285 286 287 289 290 294 299 300
Datetime
2012-12-01 00:00:00 0.265 0.158 0.191 0.290 0.254 0.831 1.399 0.073 0.275 0.868 0.114 0.304 0.126 0.280 0.444 0.298 0.283 1.327 0.766 0.263 0.666 0.082 0.863 0.133 0.294 0.713 0.113 0.075 0.305 0.163 0.471 2.194 0.069 0.662 0.125 0.204 0.133 0.102 0.366 0.110 ... 0.361 1.160 0.246 2.250 0.646 0.294 2.688 0.160 0.508 0.295 0.353 1.191 0.240 0.238 0.362 0.555 0.284 1.301 1.369 0.243 1.107 0.281 0.665 0.197 0.268 0.131 1.069 0.617 0.268 0.131 0.165 0.793 0.138 0.266 0.250 0.276 0.208 0.231 0.225 0.919
2012-12-01 00:30:00 0.242 0.152 0.201 0.265 0.292 0.833 1.337 0.053 0.394 0.922 0.076 0.122 0.103 0.271 0.227 0.471 0.165 1.379 0.633 0.156 0.738 0.076 0.911 0.210 0.290 0.411 0.137 0.056 0.309 0.188 0.462 2.181 0.094 0.719 0.138 0.187 0.126 0.157 0.361 0.093 ... 0.359 1.049 0.243 2.196 1.112 0.289 2.675 0.273 0.536 0.304 0.282 1.146 0.200 0.213 0.408 0.109 0.315 0.719 0.624 0.232 0.524 0.327 0.595 0.180 0.324 0.144 0.883 0.371 0.296 0.113 0.145 0.981 0.112 0.289 0.263 0.720 0.191 0.295 0.065 0.770
2012-12-01 01:00:00 0.220 0.087 0.162 0.295 0.175 0.595 1.155 0.044 0.306 0.891 0.127 0.253 0.098 0.373 0.193 0.363 0.275 1.386 0.484 0.181 0.661 0.073 0.888 0.178 0.221 0.879 0.125 0.094 0.332 0.175 0.476 2.281 0.081 0.336 0.106 0.189 0.127 0.077 0.341 0.137 ... 0.360 0.966 0.241 1.515 1.548 0.295 2.795 0.236 0.525 0.290 0.296 1.106 0.141 0.169 0.438 0.107 0.281 0.703 0.229 0.226 0.369 0.268 0.499 0.198 0.275 0.151 0.346 0.673 0.285 0.175 0.133 1.196 0.103 0.263 0.250 0.718 0.160 0.173 0.150 0.280
2012-12-01 01:30:00 0.236 0.160 0.146 0.228 0.167 0.246 0.660 0.045 0.325 0.831 0.094 0.250 0.093 0.262 0.197 0.264 0.355 1.308 0.321 0.181 0.697 0.072 0.942 0.130 0.272 2.081 0.119 0.031 0.305 0.150 0.305 2.100 0.069 0.403 0.094 0.196 0.122 0.109 0.346 0.058 ... 0.348 1.106 0.229 1.160 1.103 0.308 2.862 0.241 0.519 0.279 0.260 1.113 0.244 0.269 0.367 0.146 0.323 0.680 0.249 0.193 0.442 0.162 0.331 0.296 0.266 0.157 0.323 0.290 0.256 0.131 0.122 1.144 0.097 0.366 0.375 0.796 0.148 0.266 0.134 0.209
2012-12-01 02:00:00 0.239 0.171 0.116 0.196 0.185 0.270 0.192 0.054 0.319 0.303 0.099 0.235 0.068 0.324 0.225 0.246 0.212 1.289 0.165 0.169 0.652 0.068 0.886 0.179 0.272 0.204 0.131 0.081 0.324 0.150 0.205 0.894 0.069 0.291 0.119 0.196 0.131 0.110 0.346 0.148 ... 0.351 0.972 0.233 1.146 0.861 0.293 2.814 0.197 0.549 0.299 0.279 1.116 0.181 0.213 0.368 0.112 0.277 0.650 0.355 0.187 0.297 1.918 0.358 0.215 0.272 0.181 0.264 0.253 0.312 0.119 0.091 1.059 0.099 0.314 0.175 0.735 0.165 0.260 0.201 0.214

5 rows × 103 columns

In [ ]:
X_SU_C3 = X_SU_C3.values
cap = np.percentile(X_SU_C2, 97)   
X_SU_C2[X_SU_C2 > cap] = cap
In [ ]:
training_size=int(X_SU_C3.shape[0]*0.80)

test_size=(X_SU_C3.shape[0])-training_size

train,test=X_SU_C3[0:training_size],X_SU_C3[training_size:(X_SU_C3.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)


print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3312, 48, 103) (3312, 48, 103) 
 (720, 48, 103) (720, 48, 103)
In [ ]:
                             ###Building a sequential network:
Model_3 = models.Sequential()
Model_3.add(layers.Dense(400, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_3.add(Dropout(0.2))
Model_3.add(layers.Dense(200, activation='relu'))
Model_3.add(Dropout(0.2))
Model_3.add((Dense(trainX.shape[2])))
Model_3.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_3.summary()
Model: "sequential_5"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_9 (Dense)              (None, 48, 400)           41600     
_________________________________________________________________
dropout_4 (Dropout)          (None, 48, 400)           0         
_________________________________________________________________
dense_10 (Dense)             (None, 48, 200)           80200     
_________________________________________________________________
dropout_5 (Dropout)          (None, 48, 200)           0         
_________________________________________________________________
dense_11 (Dense)             (None, 48, 103)           20703     
=================================================================
Total params: 142,503
Trainable params: 142,503
Non-trainable params: 0
_________________________________________________________________
In [ ]:
model_train = Model_3.fit(trainX,trainY, epochs=50, validation_split = 0.05, batch_size=64)
Epoch 1/50
50/50 [==============================] - 0s 9ms/step - loss: 0.1927 - mae: 0.2755 - val_loss: 0.1587 - val_mae: 0.2274
Epoch 2/50
50/50 [==============================] - 0s 6ms/step - loss: 0.1344 - mae: 0.2289 - val_loss: 0.1467 - val_mae: 0.2178
Epoch 3/50
50/50 [==============================] - 0s 6ms/step - loss: 0.1164 - mae: 0.2122 - val_loss: 0.1416 - val_mae: 0.2113
Epoch 4/50
50/50 [==============================] - 0s 6ms/step - loss: 0.1055 - mae: 0.2016 - val_loss: 0.1432 - val_mae: 0.2075
Epoch 5/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0977 - mae: 0.1945 - val_loss: 0.1469 - val_mae: 0.2058
Epoch 6/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0918 - mae: 0.1890 - val_loss: 0.1445 - val_mae: 0.2087
Epoch 7/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0868 - mae: 0.1847 - val_loss: 0.1468 - val_mae: 0.2074
Epoch 8/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0829 - mae: 0.1811 - val_loss: 0.1453 - val_mae: 0.2090
Epoch 9/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0796 - mae: 0.1785 - val_loss: 0.1475 - val_mae: 0.2068
Epoch 10/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0767 - mae: 0.1758 - val_loss: 0.1483 - val_mae: 0.2077
Epoch 11/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0741 - mae: 0.1734 - val_loss: 0.1493 - val_mae: 0.2069
Epoch 12/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0719 - mae: 0.1715 - val_loss: 0.1499 - val_mae: 0.2062
Epoch 13/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0699 - mae: 0.1696 - val_loss: 0.1494 - val_mae: 0.2079
Epoch 14/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0681 - mae: 0.1679 - val_loss: 0.1485 - val_mae: 0.2065
Epoch 15/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0664 - mae: 0.1664 - val_loss: 0.1495 - val_mae: 0.2072
Epoch 16/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0651 - mae: 0.1650 - val_loss: 0.1499 - val_mae: 0.2052
Epoch 17/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0637 - mae: 0.1637 - val_loss: 0.1509 - val_mae: 0.2053
Epoch 18/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0626 - mae: 0.1625 - val_loss: 0.1494 - val_mae: 0.2062
Epoch 19/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0614 - mae: 0.1613 - val_loss: 0.1502 - val_mae: 0.2048
Epoch 20/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0604 - mae: 0.1604 - val_loss: 0.1504 - val_mae: 0.2049
Epoch 21/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0595 - mae: 0.1593 - val_loss: 0.1504 - val_mae: 0.2056
Epoch 22/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0587 - mae: 0.1584 - val_loss: 0.1503 - val_mae: 0.2053
Epoch 23/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0578 - mae: 0.1575 - val_loss: 0.1500 - val_mae: 0.2046
Epoch 24/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0571 - mae: 0.1567 - val_loss: 0.1501 - val_mae: 0.2042
Epoch 25/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0563 - mae: 0.1558 - val_loss: 0.1510 - val_mae: 0.2036
Epoch 26/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0557 - mae: 0.1551 - val_loss: 0.1504 - val_mae: 0.2043
Epoch 27/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0551 - mae: 0.1544 - val_loss: 0.1500 - val_mae: 0.2051
Epoch 28/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0545 - mae: 0.1537 - val_loss: 0.1502 - val_mae: 0.2042
Epoch 29/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0539 - mae: 0.1531 - val_loss: 0.1509 - val_mae: 0.2048
Epoch 30/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0534 - mae: 0.1525 - val_loss: 0.1510 - val_mae: 0.2038
Epoch 31/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0529 - mae: 0.1519 - val_loss: 0.1506 - val_mae: 0.2039
Epoch 32/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0523 - mae: 0.1512 - val_loss: 0.1512 - val_mae: 0.2031
Epoch 33/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0519 - mae: 0.1506 - val_loss: 0.1519 - val_mae: 0.2034
Epoch 34/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0515 - mae: 0.1501 - val_loss: 0.1517 - val_mae: 0.2040
Epoch 35/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0512 - mae: 0.1497 - val_loss: 0.1514 - val_mae: 0.2031
Epoch 36/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0507 - mae: 0.1492 - val_loss: 0.1518 - val_mae: 0.2029
Epoch 37/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0503 - mae: 0.1487 - val_loss: 0.1514 - val_mae: 0.2029
Epoch 38/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0500 - mae: 0.1483 - val_loss: 0.1518 - val_mae: 0.2024
Epoch 39/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0497 - mae: 0.1479 - val_loss: 0.1510 - val_mae: 0.2034
Epoch 40/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0494 - mae: 0.1475 - val_loss: 0.1520 - val_mae: 0.2023
Epoch 41/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0491 - mae: 0.1471 - val_loss: 0.1519 - val_mae: 0.2017
Epoch 42/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0488 - mae: 0.1468 - val_loss: 0.1521 - val_mae: 0.2023
Epoch 43/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0485 - mae: 0.1463 - val_loss: 0.1515 - val_mae: 0.2022
Epoch 44/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0482 - mae: 0.1460 - val_loss: 0.1526 - val_mae: 0.2021
Epoch 45/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0479 - mae: 0.1457 - val_loss: 0.1518 - val_mae: 0.2024
Epoch 46/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0477 - mae: 0.1454 - val_loss: 0.1517 - val_mae: 0.2018
Epoch 47/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0474 - mae: 0.1450 - val_loss: 0.1523 - val_mae: 0.2018
Epoch 48/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0472 - mae: 0.1447 - val_loss: 0.1521 - val_mae: 0.2024
Epoch 49/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0469 - mae: 0.1444 - val_loss: 0.1522 - val_mae: 0.2018
Epoch 50/50
50/50 [==============================] - 0s 6ms/step - loss: 0.0468 - mae: 0.1441 - val_loss: 0.1521 - val_mae: 0.2021
In [ ]:
Seq_train = Model_3.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_3.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
104/104 [==============================] - 0s 3ms/step
[[0.2631998  0.14203031 0.43206245 ... 0.36048666 0.37005886 0.5263839 ]
 [0.25662935 0.21095914 0.41981125 ... 0.34708974 0.3005248  0.49849707]
 [0.26408362 0.20770358 0.3186258  ... 0.29558885 0.18778461 0.39476997]
 ...
 [0.35692418 0.22710177 0.46847194 ... 0.5926039  0.26461723 0.42841002]
 [0.33371344 0.307726   0.43679553 ... 0.25168645 0.21496093 0.7650572 ]
 [0.30820614 0.23607746 0.28262213 ... 0.27865863 0.17518751 0.88189006]] (3312, 48, 103)
23/23 [==============================] - 0s 3ms/step
[[0.2636573  0.22058609 0.30270714 ... 0.4107648  0.22557554 0.49712378]
 [0.26655447 0.14547709 0.23219648 ... 0.36384782 0.19937535 0.36036772]
 [0.2433799  0.16246603 0.1783595  ... 0.26692173 0.22343646 0.24516179]
 ...
 [0.3907968  0.3560605  0.45379454 ... 0.2916498  0.27923095 0.47847903]
 [0.31857637 0.2030628  0.5882673  ... 0.14677867 0.46803063 0.51787996]
 [0.2437283  0.24207538 0.5367209  ... 0.34200448 0.22203422 0.8449274 ]] (720, 48, 103)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:48,1,1], Seq_test[:48,1,1]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.28 RMSE
Test Score: 0.20 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(30))

model.add(Dense(trainX.shape[2]))
#model.add(Dropout(.01))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_4 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_7"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_4 (LSTM)                (None, 48, 200)           243200    
_________________________________________________________________
dense_13 (Dense)             (None, 48, 103)           20703     
=================================================================
Total params: 263,903
Trainable params: 263,903
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C3 = model.fit(trainX,trainY, epochs=50, validation_split = 0.05, batch_size=64, callbacks=[lr_decay])
Epoch 1/50
50/50 [==============================] - 4s 78ms/step - loss: 0.1712 - mae: 0.2583 - val_loss: 0.1561 - val_mae: 0.2393
Epoch 2/50
50/50 [==============================] - 4s 73ms/step - loss: 0.1260 - mae: 0.2200 - val_loss: 0.1462 - val_mae: 0.2237
Epoch 3/50
50/50 [==============================] - 4s 76ms/step - loss: 0.1118 - mae: 0.2053 - val_loss: 0.1475 - val_mae: 0.2191
Epoch 4/50
50/50 [==============================] - 4s 73ms/step - loss: 0.1037 - mae: 0.1978 - val_loss: 0.1501 - val_mae: 0.2160
Epoch 5/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0984 - mae: 0.1929 - val_loss: 0.1471 - val_mae: 0.2171
Epoch 6/50
50/50 [==============================] - 4s 75ms/step - loss: 0.0941 - mae: 0.1891 - val_loss: 0.1460 - val_mae: 0.2219
Epoch 7/50
50/50 [==============================] - 4s 76ms/step - loss: 0.0915 - mae: 0.1869 - val_loss: 0.1462 - val_mae: 0.2193
Epoch 8/50
50/50 [==============================] - 4s 74ms/step - loss: 0.0891 - mae: 0.1847 - val_loss: 0.1472 - val_mae: 0.2174
Epoch 9/50
50/50 [==============================] - 4s 75ms/step - loss: 0.0874 - mae: 0.1835 - val_loss: 0.1476 - val_mae: 0.2157
Epoch 10/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0861 - mae: 0.1820 - val_loss: 0.1481 - val_mae: 0.2152
Epoch 11/50
50/50 [==============================] - 4s 75ms/step - loss: 0.0850 - mae: 0.1812 - val_loss: 0.1475 - val_mae: 0.2153
Epoch 12/50
50/50 [==============================] - 4s 71ms/step - loss: 0.0842 - mae: 0.1806 - val_loss: 0.1477 - val_mae: 0.2146
Epoch 13/50
50/50 [==============================] - 4s 71ms/step - loss: 0.0834 - mae: 0.1799 - val_loss: 0.1481 - val_mae: 0.2144
Epoch 14/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0829 - mae: 0.1794 - val_loss: 0.1476 - val_mae: 0.2144
Epoch 15/50
50/50 [==============================] - 4s 71ms/step - loss: 0.0824 - mae: 0.1790 - val_loss: 0.1480 - val_mae: 0.2139
Epoch 16/50
50/50 [==============================] - 4s 77ms/step - loss: 0.0820 - mae: 0.1786 - val_loss: 0.1475 - val_mae: 0.2140
Epoch 17/50
50/50 [==============================] - 4s 77ms/step - loss: 0.0817 - mae: 0.1784 - val_loss: 0.1477 - val_mae: 0.2142
Epoch 18/50
50/50 [==============================] - 4s 70ms/step - loss: 0.0815 - mae: 0.1782 - val_loss: 0.1476 - val_mae: 0.2141
Epoch 19/50
50/50 [==============================] - 4s 74ms/step - loss: 0.0813 - mae: 0.1780 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 20/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0811 - mae: 0.1778 - val_loss: 0.1478 - val_mae: 0.2139
Epoch 21/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0810 - mae: 0.1777 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 22/50
50/50 [==============================] - 4s 74ms/step - loss: 0.0809 - mae: 0.1776 - val_loss: 0.1477 - val_mae: 0.2137
Epoch 23/50
50/50 [==============================] - 4s 74ms/step - loss: 0.0808 - mae: 0.1775 - val_loss: 0.1477 - val_mae: 0.2139
Epoch 24/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0807 - mae: 0.1775 - val_loss: 0.1476 - val_mae: 0.2139
Epoch 25/50
50/50 [==============================] - 4s 74ms/step - loss: 0.0807 - mae: 0.1774 - val_loss: 0.1476 - val_mae: 0.2139
Epoch 26/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0806 - mae: 0.1774 - val_loss: 0.1475 - val_mae: 0.2139
Epoch 27/50
50/50 [==============================] - 4s 74ms/step - loss: 0.0806 - mae: 0.1774 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 28/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0806 - mae: 0.1773 - val_loss: 0.1475 - val_mae: 0.2139
Epoch 29/50
50/50 [==============================] - 4s 71ms/step - loss: 0.0806 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2139
Epoch 30/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0805 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 31/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0805 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 32/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0805 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 33/50
50/50 [==============================] - 4s 75ms/step - loss: 0.0805 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 34/50
50/50 [==============================] - 4s 75ms/step - loss: 0.0805 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 35/50
50/50 [==============================] - 4s 71ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 36/50
50/50 [==============================] - 4s 71ms/step - loss: 0.0805 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 37/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0805 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 38/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0805 - mae: 0.1773 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 39/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 40/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 41/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 42/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 43/50
50/50 [==============================] - 4s 74ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 44/50
50/50 [==============================] - 4s 71ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 45/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 46/50
50/50 [==============================] - 4s 75ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 47/50
50/50 [==============================] - 4s 72ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 48/50
50/50 [==============================] - 4s 71ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 49/50
50/50 [==============================] - 4s 73ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
Epoch 50/50
50/50 [==============================] - 4s 74ms/step - loss: 0.0805 - mae: 0.1772 - val_loss: 0.1476 - val_mae: 0.2138
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C3.history['loss'], label='train')
plt.plot(history_C3.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C3 = model.predict(trainX, verbose = 1)
print(testingtrain_C3[0], testingtrain_C3.shape)

testingtest_C3 = model.predict(testX, verbose = 1)
print(testingtest_C3[0], testingtest_C3.shape)
104/104 [==============================] - 1s 8ms/step
[[0.25956035 0.28249168 0.26869053 ... 0.44343212 0.30118513 0.51235706]
 [0.31708157 0.25171438 0.28425235 ... 0.5037397  0.30066836 0.6134491 ]
 [0.30924484 0.18384473 0.2568439  ... 0.44653353 0.18072176 0.5608191 ]
 ...
 [0.36802554 0.58430636 0.7726922  ... 0.4108377  0.22427912 0.48062235]
 [0.40357295 0.40269604 0.6836063  ... 0.4352592  0.21021812 0.6065389 ]
 [0.36471903 0.22758904 0.5152512  ... 0.47421396 0.16380681 0.59890413]] (3312, 48, 103)
23/23 [==============================] - 0s 8ms/step
[[0.25721473 0.31012264 0.26902622 ... 0.3562486  0.27066895 0.35104254]
 [0.2916188  0.32980195 0.25955716 ... 0.4549788  0.2945939  0.4859022 ]
 [0.30323395 0.3191586  0.21120931 ... 0.51286423 0.2937444  0.5479569 ]
 ...
 [0.30714375 0.2459789  0.4985051  ... 0.3289955  0.3945831  0.4632343 ]
 [0.30386272 0.12529324 0.40790164 ... 0.31480023 0.32146457 0.43287307]
 [0.27351636 0.10406762 0.37384695 ... 0.32093254 0.3273975  0.5038902 ]] (720, 48, 103)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


testMAE = np.mean(mae(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.30 RMSE
Test Score: 0.20 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C3[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,:1], marker='.', label="actual")
plt.plot(aa, testingtest_C3[:,1,:1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
########cluster 4
X_SU_C4 = X_SU_C4.drop(columns = 'cluster')
X_SU_C4 = X_SU_C4.transpose()
X_SU_C4.head()
Out[ ]:
20 25 104
Datetime
2012-12-01 00:00:00 1.000 0.213 4.785
2012-12-01 00:30:00 0.563 0.075 5.490
2012-12-01 01:00:00 0.588 0.050 3.372
2012-12-01 01:30:00 0.588 0.075 2.826
2012-12-01 02:00:00 0.600 0.063 2.885
In [ ]:
X_SU_C4 = X_SU_C4.values
cap = np.percentile(X_SU_C4, 97)   
X_SU_C4[X_SU_C4 > cap] = cap
In [ ]:
training_size=int(X_SU_C4.shape[0]*0.80)

test_size=(X_SU_C4.shape[0])-training_size

train,test=X_SU_C4[0:training_size],X_SU_C4[training_size:(X_SU_C4.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3312, 48, 3) (3312, 48, 3) 
 (720, 48, 3) (720, 48, 3)
In [ ]:
                             ###Building a sequential network:
Model_4 = models.Sequential()
Model_4.add(layers.Dense(100, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_4.add(Dropout(0.2))
Model_4.add(layers.Dense(50, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_4.add(Dropout(0.2))
Model_4.add((Dense(trainX.shape[2])))
Model_4.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_4.summary()
Model: "sequential_8"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_14 (Dense)             (None, 48, 100)           400       
_________________________________________________________________
dropout_6 (Dropout)          (None, 48, 100)           0         
_________________________________________________________________
dense_15 (Dense)             (None, 48, 50)            5050      
_________________________________________________________________
dropout_7 (Dropout)          (None, 48, 50)            0         
_________________________________________________________________
dense_16 (Dense)             (None, 48, 3)             153       
=================================================================
Total params: 5,603
Trainable params: 5,603
Non-trainable params: 0
_________________________________________________________________
In [ ]:
model_train = Model_4.fit(trainX,trainY, epochs=20, validation_split = 0.05, batch_size=64)
Epoch 1/20
50/50 [==============================] - 0s 6ms/step - loss: 1.3438 - mae: 0.7145 - val_loss: 0.9039 - val_mae: 0.6293
Epoch 2/20
50/50 [==============================] - 0s 4ms/step - loss: 1.0586 - mae: 0.7336 - val_loss: 0.9421 - val_mae: 0.7548
Epoch 3/20
50/50 [==============================] - 0s 3ms/step - loss: 1.0306 - mae: 0.7376 - val_loss: 0.9516 - val_mae: 0.7591
Epoch 4/20
50/50 [==============================] - 0s 3ms/step - loss: 1.0182 - mae: 0.7387 - val_loss: 0.9489 - val_mae: 0.7393
Epoch 5/20
50/50 [==============================] - 0s 3ms/step - loss: 1.0116 - mae: 0.7377 - val_loss: 0.9448 - val_mae: 0.7440
Epoch 6/20
50/50 [==============================] - 0s 3ms/step - loss: 1.0079 - mae: 0.7302 - val_loss: 0.9337 - val_mae: 0.7518
Epoch 7/20
50/50 [==============================] - 0s 3ms/step - loss: 1.0001 - mae: 0.7329 - val_loss: 0.9377 - val_mae: 0.7566
Epoch 8/20
50/50 [==============================] - 0s 4ms/step - loss: 0.9967 - mae: 0.7350 - val_loss: 0.9306 - val_mae: 0.7405
Epoch 9/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9941 - mae: 0.7291 - val_loss: 0.9275 - val_mae: 0.7422
Epoch 10/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9915 - mae: 0.7272 - val_loss: 0.9297 - val_mae: 0.7377
Epoch 11/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9882 - mae: 0.7299 - val_loss: 0.9295 - val_mae: 0.7371
Epoch 12/20
50/50 [==============================] - 0s 4ms/step - loss: 0.9865 - mae: 0.7314 - val_loss: 0.9212 - val_mae: 0.7250
Epoch 13/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9835 - mae: 0.7310 - val_loss: 0.9244 - val_mae: 0.7245
Epoch 14/20
50/50 [==============================] - 0s 4ms/step - loss: 0.9815 - mae: 0.7267 - val_loss: 0.9235 - val_mae: 0.7169
Epoch 15/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9807 - mae: 0.7276 - val_loss: 0.9277 - val_mae: 0.7393
Epoch 16/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9787 - mae: 0.7272 - val_loss: 0.9296 - val_mae: 0.7279
Epoch 17/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9784 - mae: 0.7251 - val_loss: 0.9375 - val_mae: 0.7510
Epoch 18/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9755 - mae: 0.7215 - val_loss: 0.9296 - val_mae: 0.7341
Epoch 19/20
50/50 [==============================] - 0s 4ms/step - loss: 0.9736 - mae: 0.7244 - val_loss: 0.9193 - val_mae: 0.7436
Epoch 20/20
50/50 [==============================] - 0s 3ms/step - loss: 0.9731 - mae: 0.7270 - val_loss: 0.9209 - val_mae: 0.7081
In [ ]:
Seq_train = Model_4.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_4.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
104/104 [==============================] - 0s 2ms/step
[[0.51447916 0.3802264  1.2049572 ]
 [0.37795562 0.23431635 1.4244468 ]
 [0.3787805  0.24005389 1.3785657 ]
 [0.39815992 0.26165384 1.2930142 ]
 [0.39287227 0.25641096 1.2933378 ]
 [0.39225137 0.2594415  1.2759386 ]
 [0.38519484 0.23450057 1.3721609 ]
 [0.38267675 0.22518826 1.4163638 ]
 [0.37797558 0.22371382 1.3977463 ]
 [0.38183355 0.22166203 1.4290401 ]
 [0.46983236 0.43551707 0.92788386]
 [0.5030392  0.48326784 0.88876534]
 [0.49397254 0.39009392 1.1040571 ]
 [0.53050053 0.439048   1.0941352 ]
 [0.62360317 0.45460016 1.2851727 ]
 [0.66444135 0.50295895 1.2098448 ]
 [0.6551112  0.47949386 1.2501884 ]
 [0.4648283  0.39280474 1.013857  ]
 [0.649485   0.4725693  1.2653239 ]
 [0.74461627 0.5902958  1.0425955 ]
 [0.82122517 0.6207584  1.0002911 ]
 [0.9309201  0.75961155 0.9842602 ]
 [0.9270859  0.70350486 0.9917036 ]
 [0.80988765 0.62354255 0.8495399 ]
 [0.8226478  0.6239298  0.9227246 ]
 [0.79365325 0.619708   0.8945495 ]
 [0.7708696  0.60226715 0.83525217]
 [0.73103213 0.58837456 0.8136543 ]
 [0.7334089  0.58901095 0.81340194]
 [0.65429235 0.5641345  0.7747395 ]
 [0.59954566 0.54657054 0.78288424]
 [0.68903816 0.6127457  0.85876095]
 [0.742764   0.64983726 0.9347279 ]
 [0.7463064  0.6588389  0.9613625 ]
 [0.67224646 0.5959055  0.82719934]
 [0.6513212  0.5725004  0.7860303 ]
 [0.7094805  0.6171309  0.8546208 ]
 [0.78385794 0.6324018  0.877517  ]
 [0.844149   0.65853393 0.9834286 ]
 [0.79349935 0.6290403  1.0433221 ]
 [0.8061218  0.62791574 1.0494257 ]
 [0.7947093  0.6221569  1.0634612 ]
 [0.7498523  0.6062544  1.067496  ]
 [0.80905795 0.60939866 1.0284114 ]
 [0.8295193  0.6989778  1.0287145 ]
 [0.7539766  0.6072783  1.0343156 ]
 [0.8454014  0.7119856  1.062136  ]
 [0.82980835 0.6382787  1.1361682 ]] (3312, 48, 3)
23/23 [==============================] - 0s 2ms/step
[[0.4320138  0.29638848 1.3878373 ]
 [0.38031495 0.21706414 1.448091  ]
 [0.38891184 0.22969425 1.4131591 ]
 [0.3577466  0.34509507 0.82076764]
 [0.43008342 0.43955106 0.870891  ]
 [0.35751784 0.3445167  0.82093036]
 [0.382784   0.39562774 0.8221122 ]
 [0.40192392 0.40826362 0.85278404]
 [0.35403785 0.36381483 0.7850814 ]
 [0.4507581  0.44795984 0.93160605]
 [0.43485025 0.33571017 0.9870286 ]
 [0.42518008 0.37285516 0.9527106 ]
 [0.6556865  0.7757208  0.95775783]
 [0.5974238  0.7233988  0.8750508 ]
 [0.5725517  0.62765133 0.945078  ]
 [0.41160366 0.4417103  0.8524898 ]
 [0.45759785 0.5284315  0.8544409 ]
 [0.43155676 0.4723313  0.8673403 ]
 [0.34356388 0.4000519  0.7200613 ]
 [0.47299555 0.51391226 0.94289696]
 [0.3450331  0.3808838  0.76198816]
 [0.4369009  0.49376172 0.8640964 ]
 [0.4031471  0.4281562  0.89041483]
 [0.41269055 0.46103144 0.83722675]
 [0.40081245 0.45146602 0.83029854]
 [0.40301457 0.42778558 0.8914325 ]
 [0.41122118 0.46931905 0.8242793 ]
 [0.36771375 0.4019674  0.8094175 ]
 [0.47138798 0.49887186 0.970314  ]
 [0.35652223 0.36710083 0.81660044]
 [0.46281978 0.4817344  0.9385278 ]
 [0.3870247  0.36504632 0.9350498 ]
 [0.43284896 0.4236871  0.940333  ]
 [0.6065955  0.66135365 1.0407457 ]
 [0.6048636  0.6579679  0.95593023]
 [0.65722525 0.61569816 1.1185519 ]
 [0.72712207 0.7550999  1.0889404 ]
 [0.6495433  0.6516321  1.0458395 ]
 [0.724661   0.6319471  1.2460703 ]
 [0.7187264  0.69729304 1.1361475 ]
 [0.78567076 0.8232859  1.1951096 ]
 [0.65883684 0.59167594 1.1981866 ]
 [0.6450885  0.5947832  1.1741543 ]
 [0.54988873 0.606696   0.93348396]
 [0.51677275 0.5638462  0.91315746]
 [0.5204073  0.5296877  0.9702945 ]
 [0.5108892  0.5489648  0.92282164]
 [0.4294437  0.44185412 0.8621243 ]] (720, 48, 3)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.66 RMSE
Test Score: 0.51 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(300)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:300,1,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:300,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(100, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))

model.add(Dense(trainX.shape[2]))

model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_33 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_30"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_33 (LSTM)               (None, 48, 100)           41600     
_________________________________________________________________
dense_48 (Dense)             (None, 48, 3)             303       
=================================================================
Total params: 41,903
Trainable params: 41,903
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C4 = model.fit(trainX,trainY, epochs=20, callbacks=[lr_decay], validation_split=0.05, batch_size= 64)
Epoch 1/20
50/50 [==============================] - 4s 77ms/step - loss: 1.1989 - mae: 0.7027 - val_loss: 0.9880 - val_mae: 0.7396
Epoch 2/20
50/50 [==============================] - 3s 69ms/step - loss: 1.0079 - mae: 0.7211 - val_loss: 0.8987 - val_mae: 0.7166
Epoch 3/20
50/50 [==============================] - 4s 70ms/step - loss: 0.9663 - mae: 0.7040 - val_loss: 0.8880 - val_mae: 0.7272
Epoch 4/20
50/50 [==============================] - 3s 69ms/step - loss: 0.9367 - mae: 0.6960 - val_loss: 0.8668 - val_mae: 0.6902
Epoch 5/20
50/50 [==============================] - 3s 68ms/step - loss: 0.9068 - mae: 0.6835 - val_loss: 0.8755 - val_mae: 0.6628
Epoch 6/20
50/50 [==============================] - 3s 68ms/step - loss: 0.8846 - mae: 0.6693 - val_loss: 0.9104 - val_mae: 0.6504
Epoch 7/20
50/50 [==============================] - 3s 70ms/step - loss: 0.8682 - mae: 0.6655 - val_loss: 0.8383 - val_mae: 0.6672
Epoch 8/20
50/50 [==============================] - 3s 69ms/step - loss: 0.8399 - mae: 0.6516 - val_loss: 0.8408 - val_mae: 0.6484
Epoch 9/20
50/50 [==============================] - 3s 69ms/step - loss: 0.8181 - mae: 0.6412 - val_loss: 0.8378 - val_mae: 0.6508
Epoch 10/20
50/50 [==============================] - 3s 67ms/step - loss: 0.8026 - mae: 0.6300 - val_loss: 0.8377 - val_mae: 0.6531
Epoch 11/20
50/50 [==============================] - 3s 67ms/step - loss: 0.7958 - mae: 0.6298 - val_loss: 0.8481 - val_mae: 0.6525
Epoch 12/20
50/50 [==============================] - 3s 69ms/step - loss: 0.7851 - mae: 0.6237 - val_loss: 0.8587 - val_mae: 0.6614
Epoch 13/20
50/50 [==============================] - 3s 66ms/step - loss: 0.7799 - mae: 0.6242 - val_loss: 0.8497 - val_mae: 0.6553
Epoch 14/20
50/50 [==============================] - 3s 68ms/step - loss: 0.7744 - mae: 0.6188 - val_loss: 0.8511 - val_mae: 0.6522
Epoch 15/20
50/50 [==============================] - 3s 69ms/step - loss: 0.7707 - mae: 0.6155 - val_loss: 0.8517 - val_mae: 0.6523
Epoch 16/20
50/50 [==============================] - 3s 70ms/step - loss: 0.7683 - mae: 0.6147 - val_loss: 0.8557 - val_mae: 0.6554
Epoch 17/20
50/50 [==============================] - 3s 68ms/step - loss: 0.7665 - mae: 0.6137 - val_loss: 0.8581 - val_mae: 0.6524
Epoch 18/20
50/50 [==============================] - 4s 71ms/step - loss: 0.7640 - mae: 0.6120 - val_loss: 0.8580 - val_mae: 0.6522
Epoch 19/20
50/50 [==============================] - 3s 69ms/step - loss: 0.7628 - mae: 0.6118 - val_loss: 0.8588 - val_mae: 0.6542
Epoch 20/20
50/50 [==============================] - 3s 67ms/step - loss: 0.7614 - mae: 0.6095 - val_loss: 0.8608 - val_mae: 0.6536
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C4.history['loss'], label='train')
plt.plot(history_C4.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C4 = model.predict(trainX, verbose = 1)
print(testingtrain_C4[0], testingtrain_C4.shape)

testingtest_C4 = model.predict(testX, verbose = 1)
print(testingtest_C4[0], testingtest_C4.shape)
104/104 [==============================] - 1s 7ms/step
[[0.2232981  0.10241769 0.6577663 ]
 [0.34395346 0.15878558 1.0903144 ]
 [0.42450786 0.22002274 1.3047748 ]
 [0.4697867  0.28968808 1.2821862 ]
 [0.49354514 0.35265085 1.1943712 ]
 [0.50461847 0.4068863  1.060097  ]
 [0.49991226 0.45543945 0.9366353 ]
 [0.49212518 0.50092673 0.8441078 ]
 [0.481212   0.5368135  0.76142704]
 [0.4753962  0.57583094 0.72138566]
 [0.5686527  0.61949563 0.67915064]
 [0.6621595  0.68236065 0.6567025 ]
 [0.6784194  0.7630265  0.72883666]
 [0.72505873 0.8359728  0.8542013 ]
 [0.7343127  0.9415903  1.062488  ]
 [0.7919906  0.9815068  1.2366055 ]
 [0.8262008  1.0062511  1.4105102 ]
 [0.83259684 0.95228827 1.4873337 ]
 [0.81023127 0.9266317  1.4761766 ]
 [0.7803392  0.8516109  1.3140357 ]
 [0.75119835 0.7866701  1.1623063 ]
 [0.7204762  0.7886919  1.0474617 ]
 [0.70651585 0.7741498  1.0032393 ]
 [0.68956345 0.6975609  0.8934588 ]
 [0.66531396 0.61419404 0.8303273 ]
 [0.6267919  0.53042233 0.7255901 ]
 [0.5794316  0.46035007 0.62590957]
 [0.53055763 0.39703533 0.5661715 ]
 [0.4832213  0.34106517 0.53811705]
 [0.43216935 0.28287238 0.45600566]
 [0.380901   0.24353896 0.42848256]
 [0.3357704  0.23611675 0.45992017]
 [0.29162806 0.23291919 0.45516944]
 [0.24076611 0.23390695 0.43106538]
 [0.19685435 0.23034677 0.31881675]
 [0.16146863 0.25214884 0.27249724]
 [0.13409823 0.28756148 0.30380848]
 [0.11604594 0.30780023 0.29120904]
 [0.09467534 0.3218913  0.32208118]
 [0.07125444 0.33555084 0.3767808 ]
 [0.05557672 0.3150514  0.3820761 ]
 [0.04019322 0.2977482  0.4105538 ]
 [0.0329827  0.28523868 0.44340453]
 [0.02862433 0.26783654 0.44864157]
 [0.03866882 0.35074693 0.49321744]
 [0.07803225 0.2140725  0.55549765]
 [0.06295522 0.3212972  0.6087123 ]
 [0.09680536 0.25395346 0.6917833 ]] (3312, 48, 3)
23/23 [==============================] - 0s 7ms/step
[[0.17869642 0.1124132  0.45690706]
 [0.28491247 0.15963878 0.8543926 ]
 [0.3566366  0.20974723 1.0913252 ]
 [0.32034802 0.20492136 0.88069916]
 [0.27454543 0.18405858 0.7303783 ]
 [0.23758033 0.1659958  0.6328644 ]
 [0.21054602 0.1560513  0.57074493]
 [0.19709471 0.15672967 0.5410788 ]
 [0.19284028 0.16696797 0.5377029 ]
 [0.19726935 0.1870634  0.5380691 ]
 [0.21921253 0.2118673  0.5589553 ]
 [0.23616025 0.24555257 0.59631646]
 [0.28028867 0.31218863 0.65272605]
 [0.33362103 0.381456   0.73098373]
 [0.38788944 0.43891317 0.82304066]
 [0.43787032 0.49708515 0.86284643]
 [0.49413595 0.5630597  0.91810775]
 [0.5544329  0.63236    0.98373306]
 [0.61200315 0.6992065  1.0652376 ]
 [0.6751502  0.7756134  1.1268806 ]
 [0.73390675 0.8432152  1.2142998 ]
 [0.79161125 0.91221774 1.290132  ]
 [0.8441265  0.9747101  1.3538842 ]
 [0.89017195 1.0260394  1.4317995 ]
 [0.922599   1.0635322  1.4870685 ]
 [0.94095844 1.0847707  1.5140158 ]
 [0.9425841  1.0838196  1.5422311 ]
 [0.92040426 1.0563781  1.5371829 ]
 [0.88206464 1.0121413  1.4952431 ]
 [0.82211465 0.93986285 1.4537433 ]
 [0.7495925  0.85423267 1.3818803 ]
 [0.66333    0.75749063 1.2820867 ]
 [0.57727426 0.65735066 1.1917015 ]
 [0.49980617 0.5695585  1.1012311 ]
 [0.44046715 0.49050102 1.0591242 ]
 [0.40215322 0.43038613 0.99662405]
 [0.39237836 0.41592988 0.8997657 ]
 [0.3704084  0.3797306  0.87990427]
 [0.35751075 0.31719515 0.9986186 ]
 [0.38137805 0.32710442 0.86895645]
 [0.4063047  0.3559266  0.7884035 ]
 [0.3898377  0.34064987 0.75471437]
 [0.3678582  0.3244957  0.71934223]
 [0.3418942  0.31104028 0.7021429 ]
 [0.32840893 0.30773765 0.69480115]
 [0.3299512  0.31547627 0.69234073]
 [0.34219816 0.3332918  0.7038344 ]
 [0.36058834 0.35557693 0.74234474]] (720, 48, 3)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.59 RMSE
Test Score: 0.30 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C4[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C4[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
km_4_SP = KMeans(4).fit(X_SP)
pd.Series.value_counts(km_4_SP.labels_)
Out[ ]:
1    165
0     83
2     37
3     14
dtype: int64
In [ ]:
X_SP['cluster'] = km_4_SP.labels_
In [ ]:
X_SP_C1 = X_SP[X_SP.cluster == 0]
X_SP_C2 = X_SP[X_SP.cluster == 1]
X_SP_C3 = X_SP[X_SP.cluster == 2]
X_SP_C4 = X_SP[X_SP.cluster == 3]
#X_A_C5 = X_A[X_A.cluster == 4]
#X_A_C6 = X_A[X_A.cluster == 5]
In [ ]:
X_SP_C1 = X_SP_C1.drop(columns = 'cluster')
X_SP_C1 = X_SP_C1.transpose()
X_SP_C1.head()
Out[ ]:
11 12 16 20 24 25 26 27 31 33 34 42 47 54 56 57 58 59 61 63 67 69 70 74 79 80 82 84 94 95 100 102 105 107 112 113 114 119 122 125 ... 137 138 143 148 157 159 160 161 170 175 188 189 192 193 195 199 201 203 209 220 222 229 230 234 238 242 243 249 250 254 257 267 271 274 275 279 280 286 290 294
Datetime
2012-09-01 00:00:00 0.147 0.184 0.742 0.400 0.155 0.100 0.519 0.601 0.370 0.120 0.275 0.650 0.107 0.157 0.160 0.063 0.129 0.153 0.304 0.344 0.256 0.358 0.221 0.127 0.831 0.031 0.134 0.158 0.908 0.113 0.075 0.140 0.163 0.260 0.916 0.530 0.078 0.095 0.146 0.242 ... 0.071 0.282 0.407 0.628 0.161 0.426 0.148 0.063 0.315 0.116 0.069 0.043 0.991 0.162 0.189 0.110 0.141 0.081 0.080 0.271 0.215 0.894 0.844 1.765 0.156 0.280 0.125 0.181 0.112 1.100 0.197 0.061 0.324 0.527 0.131 0.094 0.121 0.186 0.124 0.212
2012-09-01 00:30:00 0.127 0.143 0.729 0.338 0.135 0.050 0.463 0.580 0.336 0.088 0.263 0.669 0.141 0.215 0.146 0.075 0.144 0.118 0.244 0.338 0.225 0.333 0.252 0.088 0.763 0.031 0.169 0.157 0.605 0.125 0.088 0.080 0.146 0.295 0.084 0.380 0.082 0.059 0.183 0.185 ... 0.135 0.256 0.339 0.484 0.152 0.930 0.160 0.100 0.271 0.089 0.068 0.082 0.529 0.162 0.392 0.154 0.122 0.079 0.090 0.066 0.261 0.911 0.638 1.486 0.150 0.263 0.151 0.135 0.115 0.997 0.306 0.051 0.236 0.447 0.100 0.075 0.112 0.165 0.101 0.283
2012-09-01 01:00:00 0.142 0.110 0.761 0.350 0.140 0.063 0.413 0.587 0.334 0.151 0.163 0.700 0.105 0.166 0.152 0.050 0.082 0.124 0.306 0.306 0.209 0.167 0.199 0.074 0.613 0.025 0.151 0.258 0.192 0.131 0.050 0.081 0.153 0.143 0.101 0.158 0.090 0.113 0.182 0.216 ... 0.101 0.246 0.296 0.454 0.130 0.248 0.243 0.063 0.255 0.106 0.066 0.033 0.380 0.191 0.290 0.112 0.117 0.066 0.070 0.076 0.187 0.899 0.200 1.716 0.162 0.263 0.110 0.145 0.108 1.062 0.238 0.083 0.557 0.411 0.163 0.081 0.127 0.185 0.064 0.169
2012-09-01 01:30:00 0.143 0.225 0.778 0.350 0.151 0.088 0.538 0.309 0.328 0.129 0.163 0.594 0.100 0.158 0.146 0.050 0.469 0.116 0.339 0.425 0.251 0.232 0.224 0.073 0.638 0.044 0.137 0.214 0.164 0.106 0.081 0.124 0.139 0.134 0.068 0.116 0.073 0.080 0.190 0.445 ... 0.110 0.256 0.213 0.503 0.791 0.238 0.217 0.088 0.287 0.109 0.065 0.051 0.364 0.183 0.313 0.143 0.151 0.122 0.070 0.053 0.284 0.696 0.263 1.487 0.144 0.245 0.185 0.167 0.103 1.017 0.214 0.054 0.178 0.432 0.119 0.069 0.131 0.218 0.108 0.222
2012-09-01 02:00:00 0.133 0.105 0.806 0.450 0.132 0.050 0.463 0.329 0.298 0.136 0.175 0.263 0.154 0.260 0.154 0.063 0.702 0.157 0.303 0.363 0.314 0.171 0.260 0.073 0.375 0.019 0.147 0.233 0.202 0.113 0.156 0.103 0.127 0.245 0.069 0.153 0.102 0.060 0.207 0.791 ... 0.078 0.241 0.237 0.478 0.362 0.188 0.182 0.075 0.246 0.090 0.064 0.059 0.519 0.179 0.855 0.123 0.144 0.121 0.073 0.282 0.185 0.814 0.238 1.386 0.169 0.280 0.080 0.203 0.118 0.963 0.301 0.067 0.642 0.603 0.131 0.094 0.121 0.174 0.096 0.209

5 rows × 83 columns

In [ ]:
X_SP_C1 = X_SP_C1.values
cap = np.percentile(X_SP_C1, 97)   
X_SP_C1[X_SP_C1 > cap] = cap
In [ ]:
training_size=int(X_SP_C1.shape[0]*0.80)

test_size=(X_SP_C1.shape[0])-training_size

train,test=X_SP_C1[0:training_size],X_SP_C1[training_size:(X_SP_C1.shape[0])]
In [ ]:
def get_batches(data, input_interval, target_interval, output_step_offset):
  batched_data_x = []
  batched_data_y = []
  for i in range(input_interval, (len(data) - target_interval - output_step_offset)):
    batched_data_x.append(data[i-input_interval:i])
    batched_data_y.append(data[i+output_step_offset:i+target_interval+output_step_offset])
  batched_data_x, batched_data_y = np.array(batched_data_x), np.array(batched_data_y)
  return batched_data_x, batched_data_y
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3350, 48, 83) (3350, 48, 83)
(730, 48, 83) (730, 48, 83)
In [ ]:
                             ###Building a sequential network:
Model_1 = models.Sequential()
Model_1.add(layers.Dense(400, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_1.add(Dropout(0.2))
Model_1.add(BatchNormalization())
Model_1.add(layers.Dense(200, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_1.add(Dropout(0.2))
Model_1.add(BatchNormalization())
Model_1.add((Dense(trainX.shape[2])))
Model_1.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_1.summary()
Model: "sequential_31"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_49 (Dense)             (None, 48, 400)           33600     
_________________________________________________________________
dropout_32 (Dropout)         (None, 48, 400)           0         
_________________________________________________________________
batch_normalization_4 (Batch (None, 48, 400)           1600      
_________________________________________________________________
dense_50 (Dense)             (None, 48, 200)           80200     
_________________________________________________________________
dropout_33 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_5 (Batch (None, 48, 200)           800       
_________________________________________________________________
dense_51 (Dense)             (None, 48, 83)            16683     
=================================================================
Total params: 132,883
Trainable params: 131,683
Non-trainable params: 1,200
_________________________________________________________________
In [ ]:
model_train = Model_1.fit(trainX,trainY, epochs=20, validation_split = 0.10, batch_size=64)
Epoch 1/20
48/48 [==============================] - 1s 12ms/step - loss: 0.6583 - mae: 0.6097 - val_loss: 0.0829 - val_mae: 0.1922
Epoch 2/20
48/48 [==============================] - 0s 7ms/step - loss: 0.2080 - mae: 0.3290 - val_loss: 0.0503 - val_mae: 0.1560
Epoch 3/20
48/48 [==============================] - 0s 7ms/step - loss: 0.1207 - mae: 0.2432 - val_loss: 0.0495 - val_mae: 0.1528
Epoch 4/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0836 - mae: 0.2015 - val_loss: 0.0485 - val_mae: 0.1501
Epoch 5/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0644 - mae: 0.1771 - val_loss: 0.0476 - val_mae: 0.1476
Epoch 6/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0539 - mae: 0.1619 - val_loss: 0.0465 - val_mae: 0.1468
Epoch 7/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0480 - mae: 0.1524 - val_loss: 0.0458 - val_mae: 0.1429
Epoch 8/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0448 - mae: 0.1465 - val_loss: 0.0450 - val_mae: 0.1404
Epoch 9/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0429 - mae: 0.1426 - val_loss: 0.0444 - val_mae: 0.1400
Epoch 10/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0417 - mae: 0.1404 - val_loss: 0.0439 - val_mae: 0.1370
Epoch 11/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0407 - mae: 0.1385 - val_loss: 0.0437 - val_mae: 0.1360
Epoch 12/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0401 - mae: 0.1375 - val_loss: 0.0436 - val_mae: 0.1341
Epoch 13/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0394 - mae: 0.1362 - val_loss: 0.0431 - val_mae: 0.1348
Epoch 14/20
48/48 [==============================] - 0s 6ms/step - loss: 0.0388 - mae: 0.1353 - val_loss: 0.0432 - val_mae: 0.1342
Epoch 15/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0382 - mae: 0.1342 - val_loss: 0.0429 - val_mae: 0.1344
Epoch 16/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0376 - mae: 0.1331 - val_loss: 0.0429 - val_mae: 0.1343
Epoch 17/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0370 - mae: 0.1323 - val_loss: 0.0428 - val_mae: 0.1341
Epoch 18/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0365 - mae: 0.1313 - val_loss: 0.0431 - val_mae: 0.1343
Epoch 19/20
48/48 [==============================] - 0s 6ms/step - loss: 0.0359 - mae: 0.1305 - val_loss: 0.0429 - val_mae: 0.1356
Epoch 20/20
48/48 [==============================] - 0s 7ms/step - loss: 0.0354 - mae: 0.1297 - val_loss: 0.0429 - val_mae: 0.1343
In [ ]:
Seq_train = Model_1.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_1.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
105/105 [==============================] - 0s 3ms/step
[[0.25585583 0.2358135  0.4456342  ... 0.227781   0.17950454 0.32811522]
 [0.22388682 0.20747945 0.35284182 ... 0.24879229 0.14480534 0.28712434]
 [0.22215718 0.20771664 0.37929899 ... 0.24054319 0.15264167 0.3059967 ]
 ...
 [0.70271957 0.2713842  0.4931599  ... 0.21210954 0.25797588 0.39742017]
 [0.55133104 0.1857231  0.41304338 ... 0.15651847 0.19362736 0.32492438]
 [0.45107466 0.20489532 0.47513664 ... 0.19974855 0.17611481 0.29912633]] (3350, 48, 83)
23/23 [==============================] - 0s 3ms/step
[[0.6498943  0.44193307 0.33232406 ... 0.4738952  0.3496797  0.5054147 ]
 [0.647191   0.45556605 0.3794263  ... 0.49250925 0.32688013 0.51929164]
 [0.51067716 0.37881082 0.34352267 ... 0.4024142  0.3011753  0.43182242]
 ...
 [0.5666083  0.4245338  0.32734317 ... 0.5489922  0.3760839  0.4204796 ]
 [0.69186014 0.47692335 0.31436002 ... 0.56631714 0.3408026  0.6456464 ]
 [0.6512926  0.42927355 0.34540892 ... 0.51637685 0.37920657 0.59895766]] (730, 48, 83)
In [ ]:
def rmse(actual, pred):
    return np.sqrt(((pred - actual) ** 2).mean())
def mae(actual, pred):
    return np.mean(np.abs(actual - pred))
In [ ]:
testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))

testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Test Score: 0.14 MAE
Test Score: 0.21 RMSE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: 0.001 * (0.80 ** epoch))

model = Sequential()
model.add(LSTM(100, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(84))
#model.add(Dropout(.5))
#model.add(LSTM(50, activation='relu', return_sequences = True))
#model.add(Dropout(.01))

model.add(Dense(trainX.shape[2]))
#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_34 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_32"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_34 (LSTM)               (None, 48, 100)           73600     
_________________________________________________________________
dense_52 (Dense)             (None, 48, 83)            8383      
=================================================================
Total params: 81,983
Trainable params: 81,983
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_SP_C1 = model.fit(trainX,trainY, epochs=30, batch_size=64, callbacks=[lr_decay], validation_split = 0.10)
#, callbacks=[lr_decay]
Epoch 1/30
53/53 [==============================] - 4s 74ms/step - loss: 0.0719 - mae: 0.1869
Epoch 2/30
53/53 [==============================] - 4s 74ms/step - loss: 0.0490 - mae: 0.1562
Epoch 3/30
53/53 [==============================] - 4s 72ms/step - loss: 0.0457 - mae: 0.1490
Epoch 4/30
53/53 [==============================] - 4s 74ms/step - loss: 0.0436 - mae: 0.1446
Epoch 5/30
53/53 [==============================] - 4s 72ms/step - loss: 0.0425 - mae: 0.1422
Epoch 6/30
53/53 [==============================] - 4s 73ms/step - loss: 0.0418 - mae: 0.1406
Epoch 7/30
53/53 [==============================] - 4s 75ms/step - loss: 0.0412 - mae: 0.1396
Epoch 8/30
53/53 [==============================] - 4s 72ms/step - loss: 0.0408 - mae: 0.1387
Epoch 9/30
53/53 [==============================] - 4s 74ms/step - loss: 0.0405 - mae: 0.1380
Epoch 10/30
53/53 [==============================] - 4s 77ms/step - loss: 0.0402 - mae: 0.1376
Epoch 11/30
53/53 [==============================] - 4s 73ms/step - loss: 0.0400 - mae: 0.1371
Epoch 12/30
53/53 [==============================] - 4s 74ms/step - loss: 0.0398 - mae: 0.1369
Epoch 13/30
53/53 [==============================] - 4s 70ms/step - loss: 0.0397 - mae: 0.1366
Epoch 14/30
53/53 [==============================] - 4s 72ms/step - loss: 0.0396 - mae: 0.1364
Epoch 15/30
53/53 [==============================] - 4s 75ms/step - loss: 0.0395 - mae: 0.1363
Epoch 16/30
53/53 [==============================] - 4s 75ms/step - loss: 0.0394 - mae: 0.1362
Epoch 17/30
53/53 [==============================] - 4s 73ms/step - loss: 0.0394 - mae: 0.1361
Epoch 18/30
53/53 [==============================] - 4s 72ms/step - loss: 0.0393 - mae: 0.1359
Epoch 19/30
53/53 [==============================] - 4s 74ms/step - loss: 0.0393 - mae: 0.1359
Epoch 20/30
53/53 [==============================] - 4s 72ms/step - loss: 0.0393 - mae: 0.1359
Epoch 21/30
53/53 [==============================] - 4s 72ms/step - loss: 0.0392 - mae: 0.1358
Epoch 22/30
53/53 [==============================] - 4s 71ms/step - loss: 0.0392 - mae: 0.1358
Epoch 23/30
53/53 [==============================] - 4s 71ms/step - loss: 0.0392 - mae: 0.1358
Epoch 24/30
53/53 [==============================] - 4s 71ms/step - loss: 0.0392 - mae: 0.1357
Epoch 25/30
53/53 [==============================] - 4s 72ms/step - loss: 0.0392 - mae: 0.1357
Epoch 26/30
53/53 [==============================] - 4s 73ms/step - loss: 0.0392 - mae: 0.1357
Epoch 27/30
53/53 [==============================] - 4s 73ms/step - loss: 0.0392 - mae: 0.1357
Epoch 28/30
53/53 [==============================] - 4s 73ms/step - loss: 0.0392 - mae: 0.1357
Epoch 29/30
53/53 [==============================] - 4s 74ms/step - loss: 0.0392 - mae: 0.1357
Epoch 30/30
53/53 [==============================] - 4s 73ms/step - loss: 0.0392 - mae: 0.1357
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_SP_C1.history['loss'], label='train')
#plt.plot(history_SP_C1.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C1 = model.predict(trainX, verbose = 1)
print(testingtrain_C1[0], testingtrain_C1.shape)

testingtest_C1 = model.predict(testX, verbose = 1)
print(testingtest_C1[0], testingtest_C1.shape)
105/105 [==============================] - 1s 8ms/step
[[0.2279395  0.18897443 0.2875518  ... 0.18959059 0.15174644 0.2588876 ]
 [0.26178578 0.22180249 0.36256492 ... 0.21852127 0.18746105 0.3224548 ]
 [0.2510702  0.19902168 0.38100314 ... 0.21251327 0.17819764 0.30945212]
 ...
 [0.67948705 0.2785882  0.5584798  ... 0.17291181 0.07186081 0.4009105 ]
 [0.55849284 0.23680754 0.49423245 ... 0.18350513 0.05571609 0.3814928 ]
 [0.47221464 0.19573677 0.46341857 ... 0.17392941 0.06525192 0.3492577 ]] (3350, 48, 83)
23/23 [==============================] - 0s 8ms/step
[[0.5566539  0.45480937 0.45112517 ... 0.4007179  0.34133598 0.58452183]
 [0.61231047 0.4739234  0.4124847  ... 0.42035413 0.3515299  0.6080302 ]
 [0.6271667  0.45899084 0.38193348 ... 0.3767109  0.2986756  0.56985366]
 ...
 [0.5733824  0.59129816 0.43007836 ... 0.5996911  0.49599674 0.48454577]
 [0.60596323 0.5314286  0.43974555 ... 0.55633295 0.42031735 0.59884953]
 [0.6742164  0.48852825 0.46137705 ... 0.5494612  0.45811576 0.6311808 ]] (730, 48, 83)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

trainMAE = np.mean(mae(trainY[:,1,:], testingtrain_C1[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C1[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.21 RMSE
Test Score: 0.22 RMSE
Train Score: 0.14 MAE
Test Score: 0.15 MAE
In [ ]:
plt.imshow(testY[:47,47,:])
plt.show()
plt.imshow(testingtest_C1[:47,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,30], marker='.', label="actual")
plt.plot(aa, testingtest_C1[:,1,30], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
X_SP_C2 = X_SP_C2.drop(columns = 'cluster')
X_SP_C2 = X_SP_C2.transpose()
X_SP_C2.head()
Out[ ]:
1 3 4 5 7 9 10 13 14 15 18 19 22 23 28 29 30 32 36 37 38 39 40 41 43 44 45 48 49 50 51 53 60 64 65 66 71 72 73 76 ... 227 231 232 233 235 236 237 239 240 241 244 245 247 252 258 259 260 261 263 264 265 266 268 270 272 273 277 278 282 283 284 285 287 288 291 292 295 296 297 298
Datetime
2012-09-01 00:00:00 0.107 0.081 0.110 0.133 0.051 0.014 0.041 0.039 0.129 0.088 0.062 0.094 0.059 0.068 0.094 0.056 0.028 0.085 0.074 0.233 0.043 0.074 0.060 0.106 0.070 0.059 0.058 0.744 0.139 0.025 0.034 0.054 0.215 0.041 0.201 0.075 0.201 0.431 0.127 0.144 ... 0.059 0.119 1.288 0.046 0.028 0.625 0.038 0.070 0.802 0.051 0.144 0.074 0.716 0.079 0.901 0.053 0.0 0.184 1.005 0.031 0.219 0.019 0.071 0.151 0.123 0.093 0.071 0.135 0.175 0.111 0.122 0.339 1.038 0.041 0.244 0.031 0.068 0.028 0.258 0.240
2012-09-01 00:30:00 0.049 0.091 0.108 0.076 0.057 0.047 0.008 0.039 0.124 0.080 0.075 0.063 0.068 0.091 0.094 0.065 0.051 0.060 0.114 0.176 0.050 0.045 0.097 0.113 0.135 0.101 0.076 0.144 0.185 0.025 0.043 0.059 0.056 0.038 0.209 0.075 0.370 0.388 0.079 0.094 ... 0.067 0.136 1.288 0.054 0.043 0.256 0.106 0.093 0.775 0.067 0.094 0.073 0.743 0.118 0.863 0.031 0.0 0.176 1.022 0.057 0.157 0.038 0.114 0.161 0.145 0.058 0.094 0.179 0.162 0.115 0.131 0.340 0.938 0.026 0.270 0.063 0.073 0.041 0.219 0.201
2012-09-01 01:00:00 0.102 0.042 0.098 0.107 0.074 0.012 0.041 0.089 0.126 0.110 0.076 0.088 0.065 0.045 0.100 0.048 0.030 0.062 0.160 0.165 0.025 0.041 0.077 0.035 0.097 0.060 0.068 0.150 0.199 0.025 0.058 0.043 0.083 0.032 0.188 0.088 0.544 0.419 0.067 0.144 ... 0.085 0.100 1.306 0.043 0.044 0.119 0.063 0.076 0.732 0.078 0.094 0.075 0.722 0.093 0.838 0.048 0.0 0.164 0.919 0.035 0.094 0.013 0.054 0.149 0.123 0.101 0.074 0.109 0.150 0.097 0.157 0.344 1.000 0.026 0.181 0.400 0.094 0.047 0.156 0.101
2012-09-01 01:30:00 0.053 0.036 0.099 0.067 0.070 0.011 0.009 0.087 0.105 0.086 0.061 0.100 0.552 0.053 0.100 0.079 0.054 0.059 0.131 0.177 0.047 0.046 0.075 0.064 0.052 0.080 0.059 0.113 0.136 0.031 0.042 0.059 0.083 0.030 0.223 0.075 0.026 0.406 0.141 0.100 ... 0.054 0.132 1.281 0.039 0.027 0.081 0.081 0.089 0.732 0.047 0.113 0.070 0.760 0.103 0.838 0.053 0.0 0.172 0.867 0.063 0.094 0.013 0.056 0.152 0.142 0.071 0.095 0.111 0.106 0.111 0.122 0.336 0.950 0.042 0.163 0.031 0.063 0.018 0.092 0.098
2012-09-01 02:00:00 0.086 0.052 0.107 0.074 0.087 0.011 0.041 0.050 0.111 0.065 0.064 0.088 0.202 0.122 0.100 0.059 0.069 0.058 0.069 0.150 0.028 0.059 0.103 0.034 0.066 0.088 0.105 0.838 0.130 0.031 0.033 0.044 0.077 0.027 0.171 0.063 0.034 0.400 0.117 0.144 ... 0.073 0.123 1.313 0.039 0.048 0.119 0.088 0.076 0.370 0.072 0.156 0.068 0.736 0.106 0.850 0.067 0.0 0.155 0.986 0.031 0.094 0.388 0.132 0.158 0.131 0.053 0.081 0.032 0.088 0.212 0.150 0.321 0.675 0.011 0.107 0.025 0.085 0.054 0.220 0.082

5 rows × 165 columns

In [ ]:
X_SP_C2 = X_SP_C2.values
cap = np.percentile(X_SP_C2, 97)   
X_SP_C2[X_SP_C2 > cap] = cap
In [ ]:
training_size=int(X_SP_C2.shape[0]*0.80)

test_size=(X_SP_C2.shape[0])-training_size

train,test=X_SP_C2[0:training_size],X_SP_C2[training_size:(X_SP_C2.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3350, 48, 165) (3350, 48, 165) 
 (730, 48, 165) (730, 48, 165)
In [ ]:
                             ###Building a sequential network:
Model_2 = models.Sequential()
Model_2.add(layers.Dense(600, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_2.add(Dropout(0.2))
Model_2.add(BatchNormalization())

Model_2.add(layers.Dense(300, activation='relu'))
Model_2.add(Dropout(0.2))
Model_2.add(BatchNormalization())

Model_2.add((Dense(trainX.shape[2])))
Model_2.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_2.summary()
Model: "sequential_33"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_53 (Dense)             (None, 48, 600)           99600     
_________________________________________________________________
dropout_34 (Dropout)         (None, 48, 600)           0         
_________________________________________________________________
batch_normalization_6 (Batch (None, 48, 600)           2400      
_________________________________________________________________
dense_54 (Dense)             (None, 48, 300)           180300    
_________________________________________________________________
dropout_35 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_7 (Batch (None, 48, 300)           1200      
_________________________________________________________________
dense_55 (Dense)             (None, 48, 165)           49665     
=================================================================
Total params: 333,165
Trainable params: 331,365
Non-trainable params: 1,800
_________________________________________________________________
In [ ]:
model_train = Model_2.fit(trainX,trainY, epochs=20, validation_split = 0.150, batch_size=64)
Epoch 1/20
45/45 [==============================] - 1s 15ms/step - loss: 0.4826 - mae: 0.4941 - val_loss: 0.0279 - val_mae: 0.1079
Epoch 2/20
45/45 [==============================] - 0s 10ms/step - loss: 0.1368 - mae: 0.2393 - val_loss: 0.0191 - val_mae: 0.0947
Epoch 3/20
45/45 [==============================] - 0s 9ms/step - loss: 0.0772 - mae: 0.1721 - val_loss: 0.0190 - val_mae: 0.0924
Epoch 4/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0488 - mae: 0.1366 - val_loss: 0.0188 - val_mae: 0.0916
Epoch 5/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0338 - mae: 0.1160 - val_loss: 0.0185 - val_mae: 0.0903
Epoch 6/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0259 - mae: 0.1036 - val_loss: 0.0181 - val_mae: 0.0884
Epoch 7/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0217 - mae: 0.0959 - val_loss: 0.0177 - val_mae: 0.0878
Epoch 8/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0195 - mae: 0.0912 - val_loss: 0.0174 - val_mae: 0.0867
Epoch 9/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0182 - mae: 0.0884 - val_loss: 0.0172 - val_mae: 0.0854
Epoch 10/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0175 - mae: 0.0867 - val_loss: 0.0170 - val_mae: 0.0840
Epoch 11/20
45/45 [==============================] - 0s 9ms/step - loss: 0.0170 - mae: 0.0853 - val_loss: 0.0168 - val_mae: 0.0828
Epoch 12/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0167 - mae: 0.0847 - val_loss: 0.0168 - val_mae: 0.0821
Epoch 13/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0164 - mae: 0.0838 - val_loss: 0.0166 - val_mae: 0.0811
Epoch 14/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0161 - mae: 0.0832 - val_loss: 0.0166 - val_mae: 0.0806
Epoch 15/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0158 - mae: 0.0825 - val_loss: 0.0166 - val_mae: 0.0806
Epoch 16/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0156 - mae: 0.0819 - val_loss: 0.0165 - val_mae: 0.0803
Epoch 17/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0154 - mae: 0.0815 - val_loss: 0.0166 - val_mae: 0.0807
Epoch 18/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0151 - mae: 0.0808 - val_loss: 0.0164 - val_mae: 0.0799
Epoch 19/20
45/45 [==============================] - 0s 10ms/step - loss: 0.0148 - mae: 0.0802 - val_loss: 0.0164 - val_mae: 0.0799
Epoch 20/20
45/45 [==============================] - 0s 9ms/step - loss: 0.0146 - mae: 0.0796 - val_loss: 0.0163 - val_mae: 0.0800
In [ ]:
Seq_train = Model_2.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_2.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
105/105 [==============================] - 0s 3ms/step
[[0.21830167 0.07582293 0.10614312 ... 0.0813149  0.20448543 0.14292562]
 [0.21672529 0.07055191 0.10225533 ... 0.07246038 0.20349239 0.12788384]
 [0.24061781 0.06052664 0.10521667 ... 0.07900707 0.21144664 0.12680666]
 ...
 [0.45996806 0.11439624 0.13664116 ... 0.08333121 0.23452567 0.20938724]
 [0.3485203  0.07994034 0.11414963 ... 0.0822785  0.19876482 0.14943334]
 [0.2575525  0.07851645 0.11205464 ... 0.0868905  0.20204666 0.13318929]] (3350, 48, 165)
23/23 [==============================] - 0s 4ms/step
[[0.24435085 0.0582252  0.18985265 ... 0.17553964 0.30953112 0.23737898]
 [0.18117762 0.07702576 0.1542379  ... 0.12771055 0.26987994 0.19630775]
 [0.24958989 0.08756585 0.17062563 ... 0.13567066 0.29986182 0.26222062]
 ...
 [0.23138477 0.08177189 0.28061375 ... 0.20351757 0.29284453 0.26685762]
 [0.2248821  0.06860418 0.2763616  ... 0.21949671 0.3078847  0.23496136]
 [0.27811038 0.06727934 0.23705913 ... 0.20850085 0.3291572  0.22548676]] (730, 48, 165)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.13 RMSE
Test Score: 0.08 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(138))

model.add(Dense(trainX.shape[2]))

model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_35 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_34"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_35 (LSTM)               (None, 48, 200)           292800    
_________________________________________________________________
dense_56 (Dense)             (None, 48, 165)           33165     
=================================================================
Total params: 325,965
Trainable params: 325,965
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C2 = model.fit(trainX,trainY, epochs=20, validation_split = 0.10, callbacks=[lr_decay], batch_size=64)
Epoch 1/20
48/48 [==============================] - 4s 79ms/step - loss: 0.0250 - mae: 0.1067 - val_loss: 0.0180 - val_mae: 0.0903
Epoch 2/20
48/48 [==============================] - 4s 78ms/step - loss: 0.0190 - mae: 0.0922 - val_loss: 0.0168 - val_mae: 0.0858
Epoch 3/20
48/48 [==============================] - 4s 74ms/step - loss: 0.0176 - mae: 0.0879 - val_loss: 0.0163 - val_mae: 0.0838
Epoch 4/20
48/48 [==============================] - 4s 76ms/step - loss: 0.0170 - mae: 0.0860 - val_loss: 0.0161 - val_mae: 0.0831
Epoch 5/20
48/48 [==============================] - 4s 76ms/step - loss: 0.0166 - mae: 0.0849 - val_loss: 0.0161 - val_mae: 0.0826
Epoch 6/20
48/48 [==============================] - 4s 77ms/step - loss: 0.0163 - mae: 0.0842 - val_loss: 0.0160 - val_mae: 0.0826
Epoch 7/20
48/48 [==============================] - 4s 79ms/step - loss: 0.0161 - mae: 0.0837 - val_loss: 0.0161 - val_mae: 0.0825
Epoch 8/20
48/48 [==============================] - 4s 76ms/step - loss: 0.0159 - mae: 0.0832 - val_loss: 0.0160 - val_mae: 0.0826
Epoch 9/20
48/48 [==============================] - 4s 77ms/step - loss: 0.0158 - mae: 0.0830 - val_loss: 0.0160 - val_mae: 0.0830
Epoch 10/20
48/48 [==============================] - 4s 76ms/step - loss: 0.0157 - mae: 0.0827 - val_loss: 0.0160 - val_mae: 0.0826
Epoch 11/20
48/48 [==============================] - 4s 77ms/step - loss: 0.0156 - mae: 0.0825 - val_loss: 0.0160 - val_mae: 0.0829
Epoch 12/20
48/48 [==============================] - 4s 76ms/step - loss: 0.0156 - mae: 0.0823 - val_loss: 0.0161 - val_mae: 0.0830
Epoch 13/20
48/48 [==============================] - 4s 75ms/step - loss: 0.0155 - mae: 0.0821 - val_loss: 0.0160 - val_mae: 0.0831
Epoch 14/20
48/48 [==============================] - 4s 83ms/step - loss: 0.0155 - mae: 0.0820 - val_loss: 0.0160 - val_mae: 0.0832
Epoch 15/20
48/48 [==============================] - 4s 76ms/step - loss: 0.0154 - mae: 0.0819 - val_loss: 0.0161 - val_mae: 0.0835
Epoch 16/20
48/48 [==============================] - 4s 76ms/step - loss: 0.0154 - mae: 0.0819 - val_loss: 0.0160 - val_mae: 0.0832
Epoch 17/20
48/48 [==============================] - 4s 74ms/step - loss: 0.0154 - mae: 0.0818 - val_loss: 0.0160 - val_mae: 0.0832
Epoch 18/20
48/48 [==============================] - 4s 74ms/step - loss: 0.0154 - mae: 0.0817 - val_loss: 0.0161 - val_mae: 0.0834
Epoch 19/20
48/48 [==============================] - 4s 76ms/step - loss: 0.0154 - mae: 0.0817 - val_loss: 0.0161 - val_mae: 0.0832
Epoch 20/20
48/48 [==============================] - 3s 71ms/step - loss: 0.0153 - mae: 0.0817 - val_loss: 0.0161 - val_mae: 0.0834
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C2.history['loss'], label='train')
plt.plot(history_C2.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C2 = model.predict(trainX, verbose = 1)
print(testingtrain_C2[0], testingtrain_C2.shape)

testingtest_C2 = model.predict(testX, verbose = 1)
print(testingtest_C2[0], testingtest_C2.shape)
105/105 [==============================] - 1s 9ms/step
[[0.23558792 0.07746454 0.11805908 ... 0.08781317 0.17946175 0.09757292]
 [0.27419922 0.08811729 0.13177517 ... 0.09626745 0.20545094 0.12251934]
 [0.2880928  0.08516176 0.1255368  ... 0.09325008 0.22447339 0.13284068]
 ...
 [0.2889853  0.11810995 0.11103713 ... 0.0517175  0.23359294 0.30119783]
 [0.28607872 0.12064576 0.12295818 ... 0.02502456 0.20704381 0.2656336 ]
 [0.25895435 0.10716854 0.11826538 ... 0.02959037 0.1825148  0.24083553]] (3350, 48, 165)
23/23 [==============================] - 0s 8ms/step
[[0.38664246 0.1117973  0.30051273 ... 0.2289272  0.34312174 0.25206026]
 [0.32677925 0.06782111 0.27169102 ... 0.22090347 0.33399153 0.29098243]
 [0.3263728  0.05476592 0.2538813  ... 0.19560541 0.35547328 0.34848872]
 ...
 [0.21616386 0.08063929 0.38034394 ... 0.24112585 0.32419538 0.25607508]
 [0.21197423 0.05887337 0.34784067 ... 0.235684   0.34358335 0.27047122]
 [0.23826046 0.07503822 0.2880238  ... 0.24580467 0.37225464 0.2819381 ]] (730, 48, 165)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY_RMSE, testingtrain_C2))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C2[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.13 RMSE
Test Score: 0.09 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C2[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C2[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
########cluster 3
X_SP_C3 = X_SP_C3.drop(columns = 'cluster')
X_SP_C3 = X_SP_C3.transpose()
X_SP_C3.head()
Out[ ]:
6 8 17 21 35 46 52 62 89 91 96 98 109 131 134 139 141 147 149 152 158 166 181 185 190 191 200 207 212 228 255 256 262 276 293 299 300
Datetime
2012-09-01 00:00:00 0.563 0.326 0.172 1.749 0.738 0.163 0.083 0.889 0.703 0.266 0.232 0.236 0.500 0.857 0.569 0.570 0.278 0.150 0.125 0.675 0.018 0.162 0.150 0.173 0.427 2.194 1.042 0.608 0.182 1.071 1.086 0.230 0.189 0.109 0.179 0.146 0.524
2012-09-01 00:30:00 0.582 0.153 0.157 1.370 0.743 0.132 0.072 1.075 0.748 0.200 0.115 0.244 0.513 0.950 0.358 0.630 0.260 0.188 0.088 0.238 0.018 0.215 0.163 0.167 0.404 1.139 1.014 0.318 0.111 0.108 1.047 0.261 0.199 0.168 0.108 0.107 0.350
2012-09-01 01:00:00 0.599 0.152 0.144 0.948 0.751 0.091 0.071 1.156 0.732 0.161 0.114 0.198 0.481 0.940 0.581 0.700 0.311 0.213 0.075 0.225 0.017 0.149 0.313 0.164 0.401 0.163 0.998 0.317 0.132 0.239 0.960 0.224 0.170 0.098 0.144 0.058 0.154
2012-09-01 01:30:00 0.627 0.091 0.149 0.382 0.785 0.087 0.079 0.718 0.717 0.106 0.109 0.346 0.475 0.867 0.358 0.253 0.371 0.188 0.413 0.213 0.017 0.157 0.775 0.162 0.443 0.144 0.991 0.263 0.152 0.208 1.141 0.265 0.175 0.114 0.157 0.107 0.112
2012-09-01 02:00:00 0.612 0.155 0.286 0.252 0.779 0.164 0.068 0.703 0.710 0.045 0.099 0.930 0.494 0.932 0.946 0.156 0.927 0.138 0.138 0.250 0.017 0.159 0.563 0.166 0.391 0.145 0.933 1.272 0.111 0.131 1.055 0.227 0.175 0.208 0.118 0.086 0.149
In [ ]:
X_SP_C3 = X_SP_C3.values
cap = np.percentile(X_SP_C2, 97)   
X_SP_C2[X_SP_C2 > cap] = cap
In [ ]:
training_size=int(X_SP_C3.shape[0]*0.80)

test_size=(X_SP_C3.shape[0])-training_size

train,test=X_SP_C3[0:training_size],X_SP_C3[training_size:(X_SP_C3.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)


print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3350, 48, 37) (3350, 48, 37) 
 (730, 48, 37) (730, 48, 37)
In [ ]:
                             ###Building a sequential network:
Model_3 = models.Sequential()
Model_3.add(layers.Dense(200, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_3.add(Dropout(0.2))
Model_3.add(BatchNormalization())

Model_3.add(layers.Dense(100, activation='relu'))
Model_3.add(Dropout(0.2))
Model_3.add(BatchNormalization())

Model_3.add((Dense(trainX.shape[2])))
Model_3.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_3.summary()
Model: "sequential_35"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_57 (Dense)             (None, 48, 200)           7600      
_________________________________________________________________
dropout_36 (Dropout)         (None, 48, 200)           0         
_________________________________________________________________
batch_normalization_8 (Batch (None, 48, 200)           800       
_________________________________________________________________
dense_58 (Dense)             (None, 48, 100)           20100     
_________________________________________________________________
dropout_37 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
batch_normalization_9 (Batch (None, 48, 100)           400       
_________________________________________________________________
dense_59 (Dense)             (None, 48, 37)            3737      
=================================================================
Total params: 32,637
Trainable params: 32,037
Non-trainable params: 600
_________________________________________________________________
In [ ]:
model_train = Model_3.fit(trainX,trainY, epochs=20, validation_split = 0.10, batch_size=64)
Epoch 1/20
48/48 [==============================] - 0s 9ms/step - loss: 1.0086 - mae: 0.7583 - val_loss: 0.2226 - val_mae: 0.3348
Epoch 2/20
48/48 [==============================] - 0s 6ms/step - loss: 0.4550 - mae: 0.4994 - val_loss: 0.1366 - val_mae: 0.2455
Epoch 3/20
48/48 [==============================] - 0s 5ms/step - loss: 0.2804 - mae: 0.3752 - val_loss: 0.1114 - val_mae: 0.2351
Epoch 4/20
48/48 [==============================] - 0s 5ms/step - loss: 0.2163 - mae: 0.3250 - val_loss: 0.1066 - val_mae: 0.2279
Epoch 5/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1821 - mae: 0.2955 - val_loss: 0.1029 - val_mae: 0.2239
Epoch 6/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1605 - mae: 0.2755 - val_loss: 0.1011 - val_mae: 0.2190
Epoch 7/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1465 - mae: 0.2615 - val_loss: 0.0990 - val_mae: 0.2155
Epoch 8/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1374 - mae: 0.2519 - val_loss: 0.0974 - val_mae: 0.2129
Epoch 9/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1309 - mae: 0.2448 - val_loss: 0.0965 - val_mae: 0.2113
Epoch 10/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1260 - mae: 0.2397 - val_loss: 0.0949 - val_mae: 0.2075
Epoch 11/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1225 - mae: 0.2358 - val_loss: 0.0943 - val_mae: 0.2079
Epoch 12/20
48/48 [==============================] - 0s 6ms/step - loss: 0.1198 - mae: 0.2330 - val_loss: 0.0936 - val_mae: 0.2056
Epoch 13/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1176 - mae: 0.2309 - val_loss: 0.0930 - val_mae: 0.2040
Epoch 14/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1156 - mae: 0.2289 - val_loss: 0.0928 - val_mae: 0.2040
Epoch 15/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1138 - mae: 0.2271 - val_loss: 0.0923 - val_mae: 0.2031
Epoch 16/20
48/48 [==============================] - 0s 6ms/step - loss: 0.1122 - mae: 0.2255 - val_loss: 0.0919 - val_mae: 0.2025
Epoch 17/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1106 - mae: 0.2238 - val_loss: 0.0915 - val_mae: 0.2026
Epoch 18/20
48/48 [==============================] - 0s 6ms/step - loss: 0.1089 - mae: 0.2227 - val_loss: 0.0914 - val_mae: 0.2020
Epoch 19/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1075 - mae: 0.2213 - val_loss: 0.0917 - val_mae: 0.2015
Epoch 20/20
48/48 [==============================] - 0s 5ms/step - loss: 0.1062 - mae: 0.2203 - val_loss: 0.0913 - val_mae: 0.2009
In [ ]:
Seq_train = Model_3.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_3.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
105/105 [==============================] - 0s 2ms/step
[[0.31185862 0.21461746 0.22940919 ... 0.28261402 0.25125784 0.6296032 ]
 [0.31539267 0.19388434 0.17194153 ... 0.2329413  0.18559906 0.5825225 ]
 [0.2925578  0.20266603 0.18653312 ... 0.29473257 0.14500451 0.37253895]
 ...
 [0.4384648  0.34718448 0.423933   ... 0.29972696 0.6802288  0.26010156]
 [0.42028946 0.2696163  0.22244006 ... 0.21783236 0.4404832  0.5213151 ]
 [0.45456883 0.19305499 0.21557145 ... 0.31119832 0.29304767 0.6359571 ]] (3350, 48, 37)
23/23 [==============================] - 0s 2ms/step
[[0.64791024 0.78324187 0.53887844 ... 0.46498805 0.42457134 1.0809251 ]
 [0.50301397 0.48573905 0.62002647 ... 0.46309215 0.6180491  0.7681947 ]
 [0.514925   0.51548797 0.48415673 ... 0.6087462  0.5574774  0.72225016]
 ...
 [0.573985   0.6656606  0.46528572 ... 0.44116205 0.35869968 1.2322013 ]
 [0.62389696 0.65076685 0.4871143  ... 0.4618945  0.49113142 1.3846786 ]
 [0.49395514 0.51625985 0.33842304 ... 0.43699574 0.43328422 0.8518366 ]] (730, 48, 37)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.33 RMSE
Test Score: 0.21 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(100, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))

model.add(Dense(trainX.shape[2]))

model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_36 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_36"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_36 (LSTM)               (None, 48, 100)           55200     
_________________________________________________________________
dense_60 (Dense)             (None, 48, 37)            3737      
=================================================================
Total params: 58,937
Trainable params: 58,937
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C3 = model.fit(trainX,trainY, epochs=20, batch_size=64, validation_split = 0.10, callbacks=[lr_decay])
Epoch 1/20
48/48 [==============================] - 4s 75ms/step - loss: 0.2069 - mae: 0.3159 - val_loss: 0.1202 - val_mae: 0.2530
Epoch 2/20
48/48 [==============================] - 3s 72ms/step - loss: 0.1399 - mae: 0.2594 - val_loss: 0.1092 - val_mae: 0.2355
Epoch 3/20
48/48 [==============================] - 3s 68ms/step - loss: 0.1284 - mae: 0.2450 - val_loss: 0.1056 - val_mae: 0.2285
Epoch 4/20
48/48 [==============================] - 4s 74ms/step - loss: 0.1227 - mae: 0.2382 - val_loss: 0.1035 - val_mae: 0.2256
Epoch 5/20
48/48 [==============================] - 3s 70ms/step - loss: 0.1191 - mae: 0.2337 - val_loss: 0.1028 - val_mae: 0.2256
Epoch 6/20
48/48 [==============================] - 3s 72ms/step - loss: 0.1165 - mae: 0.2311 - val_loss: 0.1012 - val_mae: 0.2223
Epoch 7/20
48/48 [==============================] - 3s 69ms/step - loss: 0.1146 - mae: 0.2287 - val_loss: 0.1004 - val_mae: 0.2214
Epoch 8/20
48/48 [==============================] - 3s 72ms/step - loss: 0.1129 - mae: 0.2269 - val_loss: 0.1001 - val_mae: 0.2208
Epoch 9/20
48/48 [==============================] - 3s 69ms/step - loss: 0.1117 - mae: 0.2256 - val_loss: 0.0997 - val_mae: 0.2202
Epoch 10/20
48/48 [==============================] - 3s 72ms/step - loss: 0.1106 - mae: 0.2241 - val_loss: 0.0997 - val_mae: 0.2206
Epoch 11/20
48/48 [==============================] - 3s 72ms/step - loss: 0.1097 - mae: 0.2232 - val_loss: 0.0993 - val_mae: 0.2190
Epoch 12/20
48/48 [==============================] - 3s 70ms/step - loss: 0.1090 - mae: 0.2224 - val_loss: 0.0994 - val_mae: 0.2199
Epoch 13/20
48/48 [==============================] - 3s 72ms/step - loss: 0.1084 - mae: 0.2218 - val_loss: 0.0993 - val_mae: 0.2197
Epoch 14/20
48/48 [==============================] - 4s 74ms/step - loss: 0.1080 - mae: 0.2213 - val_loss: 0.0991 - val_mae: 0.2189
Epoch 15/20
48/48 [==============================] - 3s 72ms/step - loss: 0.1076 - mae: 0.2209 - val_loss: 0.0992 - val_mae: 0.2196
Epoch 16/20
48/48 [==============================] - 4s 74ms/step - loss: 0.1073 - mae: 0.2206 - val_loss: 0.0991 - val_mae: 0.2191
Epoch 17/20
48/48 [==============================] - 4s 74ms/step - loss: 0.1070 - mae: 0.2203 - val_loss: 0.0990 - val_mae: 0.2190
Epoch 18/20
48/48 [==============================] - 3s 71ms/step - loss: 0.1068 - mae: 0.2201 - val_loss: 0.0991 - val_mae: 0.2192
Epoch 19/20
48/48 [==============================] - 4s 76ms/step - loss: 0.1067 - mae: 0.2199 - val_loss: 0.0991 - val_mae: 0.2194
Epoch 20/20
48/48 [==============================] - 3s 72ms/step - loss: 0.1065 - mae: 0.2199 - val_loss: 0.0991 - val_mae: 0.2192
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C3.history['loss'], label='train')
plt.plot(history_C3.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C3 = model.predict(trainX, verbose = 1)
print(testingtrain_C3[0], testingtrain_C3.shape)

testingtest_C3 = model.predict(testX, verbose = 1)
print(testingtest_C3[0], testingtest_C3.shape)
105/105 [==============================] - 1s 8ms/step
[[ 0.3105225   0.27650908  0.15987754 ...  0.30295834  0.1622762
   0.33187437]
 [ 0.37899154  0.32013243  0.18468075 ...  0.35774925  0.2279376
   0.4507788 ]
 [ 0.374182    0.32932094  0.18585892 ...  0.37497732  0.26541024
   0.45886758]
 ...
 [ 0.6522929   0.4493202   0.57734877 ...  0.17250156  0.32885608
   0.6188862 ]
 [ 0.59180015  0.46194825  0.5130811  ...  0.00237956  0.11714733
   0.82289076]
 [ 0.45498353  0.39879006  0.54929274 ... -0.08607152 -0.05945078
   0.7713693 ]] (3350, 48, 37)
23/23 [==============================] - 0s 8ms/step
[[0.37734294 0.5505203  0.35124877 ... 0.42819482 0.35743734 0.4119725 ]
 [0.50591314 0.6799939  0.528646   ... 0.6913221  0.52365994 0.64244944]
 [0.55308354 0.73891944 0.5270712  ... 0.87107486 0.5629902  0.73513395]
 ...
 [0.5140939  0.7733887  0.38866124 ... 0.50245243 0.50094295 0.84976953]
 [0.62851846 0.9750443  0.35642958 ... 0.513784   0.55352235 0.85078204]
 [0.549905   0.9018943  0.44588307 ... 0.5034483  0.5385716  0.859338  ]] (730, 48, 37)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


testMAE = np.mean(mae(testY[:,1,:], testingtest_C3[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.34 RMSE
Test Score: 0.23 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C3[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C3[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
########cluster 4
X_SP_C4 = X_SP[X_SP.cluster == 3]
X_SP_C4 = X_SP_C4.drop(columns = 'cluster')
X_SP_C4 = X_SP_C4.transpose()
X_SP_C4.head()
Out[ ]:
55 68 75 104 145 156 206 246 248 251 253 269 281 289
Datetime
2012-09-01 00:00:00 0.437 0.217 0.848 0.456 0.751 0.313 0.513 0.641 1.982 0.301 1.061 0.600 0.719 0.664
2012-09-01 00:30:00 0.445 0.304 1.991 0.214 0.754 0.175 0.489 0.612 0.250 0.313 1.085 0.601 0.647 0.677
2012-09-01 01:00:00 0.407 0.901 0.815 0.302 0.751 0.125 0.539 0.614 0.195 2.247 0.930 0.663 0.668 0.652
2012-09-01 01:30:00 0.391 0.840 0.109 0.285 0.731 0.188 0.610 0.614 0.228 2.287 1.002 0.588 0.729 0.083
2012-09-01 02:00:00 0.781 0.903 0.118 0.349 0.762 0.150 0.559 0.647 0.202 2.249 1.033 0.626 0.124 0.102
In [ ]:
X_SP_C4 = X_SP_C4.values
cap = np.percentile(X_SP_C4, 97)   
X_SP_C4[X_SP_C4 > cap] = cap
In [ ]:
X_SP_C4.max()
Out[ ]:
1.516470000000001
In [ ]:
training_size=int(X_SP_C4.shape[0]*0.80)

test_size=(X_SP_C4.shape[0])-training_size

train,test=X_SP_C4[0:training_size],X_SP_C4[training_size:(X_SP_C4.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3350, 48, 14) (3350, 48, 14) 
 (730, 48, 14) (730, 48, 14)
In [ ]:
                            ###Building a sequential network:
Model_4 = models.Sequential()
Model_4.add(layers.Dense(100, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model_4.add(Dropout(0.2))
Model_4.add(BatchNormalization())
Model_4.add(layers.Dense(50, activation='relu'))
Model_4.add(Dropout(0.2))
Model_4.add(BatchNormalization())
Model_4.add((Dense(trainX.shape[2])))
Model_4.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model_4.summary()
Model: "sequential_37"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_61 (Dense)             (None, 48, 100)           1500      
_________________________________________________________________
dropout_38 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
batch_normalization_10 (Batc (None, 48, 100)           400       
_________________________________________________________________
dense_62 (Dense)             (None, 48, 50)            5050      
_________________________________________________________________
dropout_39 (Dropout)         (None, 48, 50)            0         
_________________________________________________________________
batch_normalization_11 (Batc (None, 48, 50)            200       
_________________________________________________________________
dense_63 (Dense)             (None, 48, 14)            714       
=================================================================
Total params: 7,864
Trainable params: 7,564
Non-trainable params: 300
_________________________________________________________________
In [ ]:
model_train = Model_4.fit(trainX,trainY, epochs=30, batch_size=32, validation_split = 0.10)
Epoch 1/30
95/95 [==============================] - 1s 6ms/step - loss: 0.7386 - mae: 0.6463 - val_loss: 0.1281 - val_mae: 0.2538
Epoch 2/30
95/95 [==============================] - 0s 5ms/step - loss: 0.2613 - mae: 0.3805 - val_loss: 0.0965 - val_mae: 0.2393
Epoch 3/30
95/95 [==============================] - 0s 4ms/step - loss: 0.1553 - mae: 0.2884 - val_loss: 0.0826 - val_mae: 0.2178
Epoch 4/30
95/95 [==============================] - 0s 5ms/step - loss: 0.1119 - mae: 0.2424 - val_loss: 0.0734 - val_mae: 0.2011
Epoch 5/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0912 - mae: 0.2164 - val_loss: 0.0682 - val_mae: 0.1891
Epoch 6/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0795 - mae: 0.2003 - val_loss: 0.0656 - val_mae: 0.1815
Epoch 7/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0735 - mae: 0.1908 - val_loss: 0.0642 - val_mae: 0.1780
Epoch 8/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0700 - mae: 0.1851 - val_loss: 0.0637 - val_mae: 0.1758
Epoch 9/30
95/95 [==============================] - 0s 5ms/step - loss: 0.0679 - mae: 0.1815 - val_loss: 0.0627 - val_mae: 0.1736
Epoch 10/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0665 - mae: 0.1790 - val_loss: 0.0629 - val_mae: 0.1735
Epoch 11/30
95/95 [==============================] - 0s 5ms/step - loss: 0.0655 - mae: 0.1775 - val_loss: 0.0616 - val_mae: 0.1699
Epoch 12/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0647 - mae: 0.1759 - val_loss: 0.0618 - val_mae: 0.1716
Epoch 13/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0643 - mae: 0.1753 - val_loss: 0.0618 - val_mae: 0.1710
Epoch 14/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0637 - mae: 0.1742 - val_loss: 0.0614 - val_mae: 0.1692
Epoch 15/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0632 - mae: 0.1733 - val_loss: 0.0608 - val_mae: 0.1691
Epoch 16/30
95/95 [==============================] - 0s 5ms/step - loss: 0.0629 - mae: 0.1729 - val_loss: 0.0606 - val_mae: 0.1676
Epoch 17/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0625 - mae: 0.1722 - val_loss: 0.0605 - val_mae: 0.1687
Epoch 18/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0621 - mae: 0.1717 - val_loss: 0.0610 - val_mae: 0.1698
Epoch 19/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0618 - mae: 0.1712 - val_loss: 0.0602 - val_mae: 0.1669
Epoch 20/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0615 - mae: 0.1705 - val_loss: 0.0603 - val_mae: 0.1674
Epoch 21/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0613 - mae: 0.1700 - val_loss: 0.0597 - val_mae: 0.1673
Epoch 22/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0609 - mae: 0.1696 - val_loss: 0.0599 - val_mae: 0.1667
Epoch 23/30
95/95 [==============================] - 0s 5ms/step - loss: 0.0608 - mae: 0.1692 - val_loss: 0.0598 - val_mae: 0.1669
Epoch 24/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0606 - mae: 0.1690 - val_loss: 0.0592 - val_mae: 0.1649
Epoch 25/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0603 - mae: 0.1685 - val_loss: 0.0596 - val_mae: 0.1663
Epoch 26/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0601 - mae: 0.1681 - val_loss: 0.0592 - val_mae: 0.1645
Epoch 27/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0599 - mae: 0.1677 - val_loss: 0.0590 - val_mae: 0.1663
Epoch 28/30
95/95 [==============================] - 0s 5ms/step - loss: 0.0597 - mae: 0.1676 - val_loss: 0.0593 - val_mae: 0.1659
Epoch 29/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0595 - mae: 0.1673 - val_loss: 0.0594 - val_mae: 0.1653
Epoch 30/30
95/95 [==============================] - 0s 4ms/step - loss: 0.0592 - mae: 0.1666 - val_loss: 0.0587 - val_mae: 0.1648
In [ ]:
Seq_train = Model_4.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model_4.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
105/105 [==============================] - 0s 2ms/step
[[0.81849426 0.29545692 0.6143813  1.0830798  0.6292368  0.33214182
  0.39376566 0.6520514  0.7350544  0.31395334 0.7550273  0.50375897
  0.53749776 0.5503236 ]
 [0.7131357  0.35933968 1.1046098  0.94022286 0.6611703  0.30424705
  0.41033602 0.6853829  0.34988704 0.27088594 0.78476906 0.5256327
  0.5358296  0.6146953 ]
 [0.69923294 0.75472325 0.601541   0.6878904  0.6810665  0.18426615
  0.49030852 0.6476435  0.22076291 1.3666484  0.7426915  0.6110513
  0.6957012  0.63670737]
 [0.68570024 0.7930202  0.22302607 0.482695   0.69555724 0.18339705
  0.50680953 0.63178337 0.26031816 1.422678   0.76699597 0.5949823
  0.60122687 0.22251028]
 [0.547343   0.8156979  0.10612749 0.34165046 0.69374204 0.16046731
  0.5244369  0.648292   0.18449184 1.1667645  0.6045437  0.5840476
  0.34584117 0.12549272]
 [0.45428604 0.83771193 0.07452923 0.28872144 0.6858938  0.15099384
  0.5181304  0.64403594 0.16821066 1.0544308  0.45480084 0.5672497
  0.28693676 0.1642709 ]
 [0.43452254 0.85627925 0.05703413 0.27822194 0.69416356 0.1477557
  0.52732444 0.65654266 0.16753429 1.0438287  0.43423855 0.55924225
  0.27246323 0.16334938]
 [0.4514284  0.80845976 0.12264931 0.3258308  0.7226138  0.21043421
  0.54331577 0.66470265 0.21349066 0.55318654 0.34667447 0.5319331
  0.18413445 0.16307017]
 [0.35945457 0.717864   0.1748228  0.34285155 0.6150265  0.16975993
  0.54190964 0.635608   0.23359293 0.27650392 0.2636342  0.28859782
  0.11816568 0.1661403 ]
 [0.46173865 0.5455501  0.20841303 0.40038216 0.57530314 0.22846356
  0.46715117 0.5933875  0.19337508 0.29700825 0.37047014 0.23415829
  0.21234412 0.2400347 ]
 [0.29881382 0.28129426 0.57521594 0.4496089  0.18293722 0.17034584
  0.35257086 0.39749345 0.26015216 0.1778747  0.26381242 0.09578305
  0.22767866 0.25341353]
 [0.3064818  0.26835907 0.63066983 0.44206458 0.15444127 0.19310802
  0.3958265  0.34251386 0.31024924 0.22184344 0.27821332 0.12096533
  0.24471036 0.23210865]
 [0.3847441  0.2694379  0.6006569  0.44799253 0.23441085 0.22140725
  0.34110337 0.32559162 0.30624214 0.24702501 0.34747368 0.15250544
  0.26364088 0.24400048]
 [0.42254442 0.29900643 0.4207315  0.46710247 0.30072975 0.23423861
  0.36788476 0.38301593 0.28767252 0.27844608 0.38880232 0.17603607
  0.28139225 0.22764027]
 [0.46543208 0.28439444 0.41253966 0.495903   0.31432906 0.24848905
  0.35859305 0.38933325 0.27655208 0.23956715 0.4345039  0.15244685
  0.2910152  0.23622692]
 [0.4583963  0.2861526  0.40068284 0.50862324 0.31519887 0.2955419
  0.326792   0.38768253 0.24862987 0.22193527 0.41319573 0.14582929
  0.2710951  0.26611763]
 [0.4949958  0.4050881  0.35754907 0.51305217 0.4613628  0.2906566
  0.41223335 0.4594648  0.25481665 0.25733888 0.4013436  0.22623926
  0.24666312 0.24426307]
 [0.5013534  0.25733274 0.36375406 0.47964793 0.32879713 0.28591245
  0.28733915 0.3423696  0.3050374  0.228068   0.47371382 0.15497853
  0.28595203 0.2680167 ]
 [0.5108594  0.24748799 0.37545145 0.47207475 0.3398678  0.295299
  0.25290117 0.3423882  0.24271925 0.2679388  0.49794096 0.1838227
  0.28943044 0.30884394]
 [0.54024047 0.1868307  0.44928366 0.39414582 0.35725144 0.3198287
  0.17472953 0.253803   0.2976279  0.22814506 0.6138272  0.17030281
  0.24513307 0.3216226 ]
 [0.4454941  0.20836574 0.39260772 0.47206798 0.22889166 1.0444909
  0.21899639 0.3085784  0.3081146  0.20969927 0.43382233 0.16453043
  0.24982083 0.351169  ]
 [0.34256297 0.16807427 0.44718307 0.41150278 0.15516202 1.4685564
  0.12783581 0.23022519 0.29757628 0.19359136 0.31351838 0.1919984
  0.27344948 0.27183545]
 [0.3625118  0.16506505 0.48502314 0.430719   0.15123299 1.4625251
  0.10481361 0.21183133 0.3145083  0.2049056  0.2917592  0.20655598
  0.28588587 0.26351237]
 [0.44453275 0.2736109  0.45480293 0.5210671  0.26318622 0.61187875
  0.2599777  0.34143558 0.2963051  0.1765815  0.3728994  0.14092079
  0.2611343  0.2419737 ]
 [0.49135733 0.27670902 0.3440307  0.46293238 0.29372016 0.24911636
  0.31525797 0.36913303 0.3879575  0.2349691  0.45827842 0.12510201
  0.2848059  0.29933476]
 [0.443668   0.21051762 0.35173216 0.42198968 0.29094976 0.23991029
  0.18270689 0.26186737 0.2531192  0.21098328 0.45901185 0.14962952
  0.2829012  0.27740222]
 [0.47138405 0.25565276 0.34971505 0.47574526 0.31237367 0.2567429
  0.26640654 0.31999534 0.27749887 0.23064862 0.44811124 0.14481747
  0.29246798 0.2665242 ]
 [0.47753692 0.24462637 0.3300994  0.47574615 0.3235544  0.23606169
  0.25218624 0.32395414 0.29961255 0.23998114 0.45774633 0.15869173
  0.30888453 0.28512132]
 [0.45435604 0.22629333 0.33303446 0.42909765 0.3108593  0.221674
  0.21728948 0.28582144 0.24780886 0.23278552 0.48694012 0.15287456
  0.29288304 0.2656548 ]
 [0.44039157 0.22436778 0.34677085 0.4355582  0.29470268 0.22783673
  0.21569927 0.2753613  0.25180942 0.22260252 0.4654695  0.14699128
  0.28602636 0.26445532]
 [0.44111395 0.22146207 0.3665706  0.43562496 0.29864866 0.24453051
  0.2048977  0.26367673 0.2659724  0.22149372 0.4565846  0.15369394
  0.28305918 0.2647998 ]
 [0.54285836 0.22979619 0.43744898 0.46917835 0.33214384 0.30044287
  0.2523944  0.3457387  0.31024247 0.23842692 0.5206341  0.16738093
  0.27690667 0.32196575]
 [0.48209682 0.33763963 0.36872408 0.4934113  0.37322804 0.26801866
  0.32597324 0.4254215  0.22157119 0.29005522 0.44445494 0.17516764
  0.28605956 0.28494152]
 [0.49664146 0.23553306 0.43505916 0.46928295 0.3161521  0.24160457
  0.2520509  0.3381799  0.208951   0.23645875 0.5209438  0.15786329
  0.29176527 0.30702928]
 [0.5654828  0.33297077 0.3620424  0.57083124 0.41477856 0.2726777
  0.3606352  0.41662133 0.2955596  0.36902243 0.50736785 0.22187845
  0.3460698  0.28095976]
 [0.5967396  0.30475518 0.3020318  0.53585577 0.43882143 0.33586356
  0.34954667 0.4942848  0.28702757 0.30050522 0.5771125  0.23052897
  0.2723567  0.29801545]
 [0.67636764 0.2754895  0.36821574 0.6721446  0.46717864 0.41557184
  0.3293659  0.48689917 0.4570487  0.29834294 0.6214784  0.26299745
  0.33325866 0.31698063]
 [0.73017526 0.3064024  0.4447919  0.70369947 0.5326686  0.41062394
  0.35233253 0.45894444 0.48339364 0.32682264 0.568972   0.29812482
  0.36327195 0.27533823]
 [0.70671225 0.2884244  0.41854987 0.757622   0.49289864 0.41599655
  0.28628907 0.41609573 0.53164655 0.32004017 0.56839836 0.33286214
  0.3684374  0.2856534 ]
 [0.68309057 0.31346813 0.3357479  0.66504467 0.43900877 0.36009306
  0.36723068 0.5183997  0.36838266 0.2954758  0.57538354 0.20163293
  0.31060567 0.4900257 ]
 [0.70387805 0.35935804 0.31490573 0.61933064 0.47015998 0.33077875
  0.41087317 0.53980094 0.38265052 0.29706508 0.56179774 0.15960798
  0.25297433 0.6068961 ]
 [0.74353576 0.31174505 0.35491744 0.608908   0.43816724 0.3292176
  0.3636179  0.49415803 0.31752232 0.43807817 0.6228641  0.23667839
  0.32400522 0.61492354]
 [0.80857885 0.35391325 0.6860628  0.5113474  0.62022024 0.38984442
  0.2947421  0.47969902 0.31719738 0.6465907  0.7520975  0.41569224
  0.34806266 0.56676817]
 [0.7454169  0.34096947 0.540732   0.5203529  0.5902883  0.44975156
  0.27322364 0.5250446  0.33173728 0.47827375 0.75920784 0.46024173
  0.37901255 0.19019699]
 [0.7099402  0.19251323 0.8576509  0.38943833 0.6503867  1.454741
  0.22452897 0.5187032  0.33060396 0.25754786 0.7203723  0.5908004
  0.46901572 0.21657565]
 [0.6832384  0.18460298 0.83056986 0.4140782  0.622187   1.4517114
  0.1977695  0.42201033 0.3421767  0.26551703 0.5953051  0.6536945
  0.66729164 0.21901287]
 [0.82118183 0.15828717 0.4088293  1.4576094  0.7011698  1.0366015
  0.13184054 0.50495005 1.3365707  0.23241359 0.5763078  0.8298165
  0.6549177  0.28951547]
 [0.83486295 0.20137747 0.378675   1.5589453  0.70203245 0.4706416
  0.4313597  0.70408016 1.2268866  0.29834417 0.71748286 0.739979
  0.6388791  0.56939924]] (3350, 48, 14)
23/23 [==============================] - 0s 2ms/step
[[0.68173575 0.28124276 0.29524451 0.54859626 0.41364357 0.37153307
  0.3711533  0.4248011  0.41029906 0.25664735 0.6861137  0.16247208
  0.2891743  0.40368825]
 [0.6952348  0.28253418 0.31531525 0.5741754  0.42895332 0.36349908
  0.43663397 0.4834779  0.44651505 0.2615887  0.6768612  0.16349964
  0.30314666 0.39686292]
 [0.68355703 0.30350626 0.32154003 0.6013788  0.41124535 0.32853273
  0.45055497 0.47180665 0.45967007 0.30640012 0.6111425  0.18941185
  0.35524788 0.43619782]
 [0.7293979  0.30108297 0.32390356 0.6258946  0.41618377 0.3772631
  0.48456848 0.48982498 0.5259111  0.2926838  0.62253153 0.18203647
  0.34967875 0.49991158]
 [0.6652437  0.28670612 0.31745836 0.5601793  0.39189756 0.34295043
  0.37514135 0.42451054 0.40545627 0.27971372 0.6079309  0.18117431
  0.32928365 0.38819236]
 [0.7403989  0.27615273 0.32772171 0.559093   0.36799258 0.36373827
  0.39665005 0.4544527  0.38028538 0.2932723  0.6939864  0.14693943
  0.30625045 0.4284894 ]
 [0.74572515 0.24400006 0.9201281  0.5694043  0.5805577  0.42792258
  0.25458264 0.37583017 0.35733417 0.28531355 0.7253308  0.3866828
  0.31855908 0.3582241 ]
 [0.7238235  0.22376366 1.0738747  0.5037326  0.61722606 0.4502365
  0.22065702 0.3505464  0.3003297  0.2611046  0.73251534 0.45060718
  0.31275338 0.29649475]
 [0.83987164 0.19765812 1.0451859  0.4071394  0.7458124  1.2637811
  0.24506539 0.5142507  0.274627   0.24229571 0.7921804  0.6610139
  0.50513184 0.24321792]
 [0.7698474  0.19334303 0.8425113  0.44920775 0.662158   1.2917143
  0.3491828  0.5740484  0.29695854 0.26677373 0.73254323 0.5797815
  0.57580173 0.23960851]
 [0.8019296  0.20208654 0.37803817 1.3179271  0.65516794 0.56702566
  0.398933   0.6586471  1.1245378  0.23546556 0.66814303 0.6113665
  0.5319061  0.40314004]
 [0.8358249  0.18507321 0.567202   1.4689873  0.716527   0.3419142
  0.5125026  0.8020761  1.1263188  0.22731774 0.8044332  0.68774116
  0.6363418  0.65175456]
 [0.67867464 0.5006777  1.0665727  1.0090833  0.6749406  0.22933567
  0.46408728 0.78004915 0.2860195  0.22762512 0.7221228  0.5532143
  0.58030295 0.7037346 ]
 [0.6425235  0.53773165 1.1072947  0.9417883  0.6691963  0.20637755
  0.4430275  0.7423874  0.24934186 0.26703677 0.70952225 0.55003875
  0.5828154  0.7120207 ]
 [0.7144363  0.7351413  0.6374073  0.72623277 0.6759317  0.18484013
  0.4834062  0.6301932  0.2466286  1.3020205  0.7137791  0.60387737
  0.6933429  0.68862045]
 [0.6183184  0.7684475  0.46243137 0.51841396 0.68776715 0.23714265
  0.49584943 0.6288314  0.20077296 1.3918104  0.59928966 0.566178
  0.63060904 0.68470067]
 [0.5498915  0.80339575 0.35977185 0.39178717 0.7013247  0.24034533
  0.47423804 0.613996   0.17841652 1.4835737  0.49696034 0.5977388
  0.6352336  0.6786483 ]
 [0.52473307 0.8207649  0.3537118  0.36329755 0.70905364 0.20322415
  0.4768781  0.6083425  0.16682293 1.4314556  0.44874445 0.57797813
  0.5863838  0.70096415]
 [0.53823984 0.70107794 0.34015495 0.4287821  0.61654824 0.24973263
  0.458125   0.6090275  0.22146404 0.44921547 0.42062718 0.292782
  0.26791465 0.617741  ]
 [0.463925   0.56097984 0.5888927  0.4095806  0.58076173 0.26159966
  0.46608672 0.5089147  0.21029484 0.32280445 0.34643334 0.15800573
  0.21716824 0.6415218 ]
 [0.36478633 0.57500386 0.8355441  0.33629408 0.6505313  0.1993548
  0.45533055 0.45637318 0.15339433 0.3421718  0.28826338 0.14382836
  0.16793154 0.7191229 ]
 [0.3726019  0.33352765 1.0740685  0.49316663 0.51601666 0.19066277
  0.43785018 0.42686462 0.1758343  0.24613287 0.37895805 0.14602987
  0.22022757 0.6426412 ]
 [0.39832288 0.2733841  0.90473807 0.514275   0.33096024 0.267116
  0.38020113 0.38362586 0.27085876 0.17890608 0.37879547 0.13008176
  0.21399254 0.42778516]
 [0.42482916 0.21592279 0.90110195 0.3993709  0.312436   0.2446999
  0.25639606 0.2596292  0.27482313 0.23088059 0.44045073 0.20907798
  0.19438507 0.25153732]
 [0.47002602 0.2432909  0.56401646 0.4839102  0.2783779  0.26335254
  0.30268493 0.3405018  0.26684052 0.23380801 0.44546098 0.1641439
  0.2541448  0.2681758 ]
 [0.52087873 0.25126493 0.3248703  0.47308254 0.35051987 0.2835385
  0.26249418 0.30538097 0.3449798  0.24643809 0.503675   0.17480935
  0.30319265 0.30735436]
 [0.5142855  0.26785105 0.48939025 0.4971094  0.359307   0.30928835
  0.29692855 0.303506   0.3705104  0.22175324 0.44558737 0.18222022
  0.29511636 0.2925106 ]
 [0.5285262  0.28061813 0.46896023 0.5023712  0.39147398 0.2997503
  0.35636175 0.35878217 0.34043366 0.2240425  0.46500236 0.18736221
  0.29308295 0.27836183]
 [0.50423515 0.28536427 0.36789253 0.4932742  0.37032747 0.27075016
  0.34686586 0.37281266 0.31500688 0.24705961 0.46280485 0.18127021
  0.2939063  0.2581965 ]
 [0.3925242  0.27366486 0.42209977 0.46049052 0.27525032 0.21769816
  0.32889253 0.3489976  0.2763429  0.21638337 0.362794   0.14807074
  0.26346225 0.25094017]
 [0.43459636 0.23936781 0.41779026 0.47158664 0.26558638 0.26277554
  0.26980835 0.30387196 0.31790084 0.20002392 0.4079502  0.14089087
  0.27139643 0.2584511 ]
 [0.38564414 0.26048258 0.48310173 0.45242193 0.22763917 0.22723496
  0.32214826 0.33847913 0.33145875 0.19526583 0.3581588  0.12072898
  0.26324844 0.2672589 ]
 [0.44113028 0.20005311 0.47290218 0.5115326  0.20691326 1.009641
  0.20664495 0.29693455 0.26827377 0.18878508 0.37235063 0.17596321
  0.2674376  0.31541678]
 [0.34783465 0.14884764 0.5954004  0.4184323  0.16126257 1.333315
  0.12024957 0.22589123 0.24687645 0.15895456 0.34136188 0.1876972
  0.19271134 0.26314738]
 [0.413419   0.24211615 0.69863045 0.50477016 0.25032783 0.38200188
  0.2691242  0.30265385 0.2725662  0.16975689 0.3547326  0.12661584
  0.21051224 0.33085588]
 [0.505545   0.24209481 0.42141545 0.5070016  0.3193066  0.30729312
  0.29025054 0.33268476 0.38000107 0.21512307 0.45613027 0.16721646
  0.2923323  0.2955725 ]
 [0.43188933 0.2386885  0.40506703 0.45678243 0.28749964 0.24827038
  0.24370296 0.29830432 0.2586243  0.22179592 0.41255897 0.1639787
  0.27416503 0.26591083]
 [0.4043453  0.24074695 0.41103655 0.45393035 0.2798614  0.2364425
  0.25914007 0.28776973 0.26519382 0.20158817 0.3837059  0.14511843
  0.27140698 0.26014662]
 [0.442687   0.2640496  0.38246462 0.47384503 0.30266342 0.23839468
  0.30271098 0.340743   0.26584214 0.23314624 0.4332751  0.15097946
  0.2788139  0.2578386 ]
 [0.45789793 0.24040763 0.37587902 0.4702403  0.296287   0.2584924
  0.24823949 0.31659615 0.2471732  0.22235589 0.4532891  0.1549054
  0.28303468 0.27984324]
 [0.4527005  0.21350685 0.35627577 0.43701297 0.28104582 0.26226386
  0.18049271 0.25068542 0.2716501  0.21075177 0.46845725 0.14356993
  0.28158706 0.2797027 ]
 [0.4717381  0.27767587 0.37535205 0.49186176 0.33418727 0.2478134
  0.32544544 0.36350077 0.27834406 0.23966846 0.4486739  0.16240932
  0.29395998 0.25634152]
 [0.44145632 0.21090938 0.3594258  0.43089095 0.2819455  0.24650869
  0.18135217 0.25443125 0.26691297 0.20502439 0.45433486 0.14630984
  0.27902532 0.27554065]
 [0.53748536 0.26371175 0.33802593 0.49683118 0.34123456 0.25697377
  0.28594995 0.34944227 0.32918626 0.25402907 0.5162282  0.1591626
  0.30863932 0.30574325]
 [0.59629357 0.34947765 0.33131045 0.52338517 0.3907489  0.3276368
  0.5053991  0.5206685  0.35144067 0.29959813 0.53522635 0.18549062
  0.29177052 0.31215295]
 [0.57215387 0.29921585 0.33744645 0.5331587  0.36612645 0.30100772
  0.38209832 0.44889057 0.34415898 0.26874912 0.5229094  0.17078224
  0.30966914 0.29361704]
 [0.66299224 0.2786758  0.3238259  0.5833298  0.40285456 0.35132515
  0.42885923 0.4546377  0.49297845 0.27357683 0.58386815 0.18747884
  0.34139794 0.39853555]
 [0.7738374  0.22495317 0.31964618 0.84363806 0.4647894  0.36812365
  0.54356253 0.5286768  0.7962382  0.24486816 0.72743726 0.31537956
  0.3828829  0.5837767 ]] (730, 48, 14)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.25 RMSE
Test Score: 0.17 MAE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,47,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:,47,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(50, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
model.add(Dropout(.2))
model.add(Dense(50))
model.add(Dropout(.2))

model.add(Dense(trainX.shape[2]))
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_38 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_39"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_38 (LSTM)               (None, 48, 50)            13000     
_________________________________________________________________
dropout_40 (Dropout)         (None, 48, 50)            0         
_________________________________________________________________
dense_65 (Dense)             (None, 48, 50)            2550      
_________________________________________________________________
dropout_41 (Dropout)         (None, 48, 50)            0         
_________________________________________________________________
dense_66 (Dense)             (None, 48, 14)            714       
=================================================================
Total params: 16,264
Trainable params: 16,264
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_C4 = model.fit(trainX,trainY, epochs=20, validation_split = 0.10, batch_size=32, callbacks=[lr_decay])
Epoch 1/20
95/95 [==============================] - 7s 74ms/step - loss: 0.1324 - mae: 0.2581 - val_loss: 0.0781 - val_mae: 0.2006
Epoch 2/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0826 - mae: 0.2058 - val_loss: 0.0685 - val_mae: 0.1854
Epoch 3/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0749 - mae: 0.1946 - val_loss: 0.0665 - val_mae: 0.1820
Epoch 4/20
95/95 [==============================] - 7s 71ms/step - loss: 0.0715 - mae: 0.1897 - val_loss: 0.0646 - val_mae: 0.1789
Epoch 5/20
95/95 [==============================] - 7s 73ms/step - loss: 0.0698 - mae: 0.1870 - val_loss: 0.0638 - val_mae: 0.1777
Epoch 6/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0686 - mae: 0.1851 - val_loss: 0.0634 - val_mae: 0.1757
Epoch 7/20
95/95 [==============================] - 7s 74ms/step - loss: 0.0677 - mae: 0.1835 - val_loss: 0.0629 - val_mae: 0.1747
Epoch 8/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0672 - mae: 0.1826 - val_loss: 0.0627 - val_mae: 0.1747
Epoch 9/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0666 - mae: 0.1816 - val_loss: 0.0627 - val_mae: 0.1744
Epoch 10/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0662 - mae: 0.1809 - val_loss: 0.0628 - val_mae: 0.1747
Epoch 11/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0659 - mae: 0.1804 - val_loss: 0.0626 - val_mae: 0.1744
Epoch 12/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0656 - mae: 0.1800 - val_loss: 0.0627 - val_mae: 0.1739
Epoch 13/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0655 - mae: 0.1797 - val_loss: 0.0628 - val_mae: 0.1745
Epoch 14/20
95/95 [==============================] - 7s 72ms/step - loss: 0.0653 - mae: 0.1793 - val_loss: 0.0627 - val_mae: 0.1739
Epoch 15/20
95/95 [==============================] - 7s 73ms/step - loss: 0.0651 - mae: 0.1791 - val_loss: 0.0626 - val_mae: 0.1740
Epoch 16/20
95/95 [==============================] - 7s 73ms/step - loss: 0.0650 - mae: 0.1789 - val_loss: 0.0627 - val_mae: 0.1741
Epoch 17/20
95/95 [==============================] - 7s 74ms/step - loss: 0.0649 - mae: 0.1789 - val_loss: 0.0626 - val_mae: 0.1740
Epoch 18/20
95/95 [==============================] - 7s 76ms/step - loss: 0.0648 - mae: 0.1786 - val_loss: 0.0626 - val_mae: 0.1737
Epoch 19/20
95/95 [==============================] - 7s 70ms/step - loss: 0.0648 - mae: 0.1787 - val_loss: 0.0626 - val_mae: 0.1739
Epoch 20/20
95/95 [==============================] - 7s 73ms/step - loss: 0.0648 - mae: 0.1787 - val_loss: 0.0626 - val_mae: 0.1738
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_C4.history['loss'], label='train')
plt.plot(history_C4.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_C4 = model.predict(trainX, verbose = 1)
print(testingtrain_C4[0], testingtrain_C4.shape)

testingtest_C4 = model.predict(testX, verbose = 1)
print(testingtest_C4[0], testingtest_C4.shape)
105/105 [==============================] - 1s 8ms/step
[[0.66124696 0.24753538 0.3864787  0.8200805  0.5094328  0.3610135
  0.3957823  0.5238236  0.6329067  0.27743223 0.5956077  0.34181088
  0.39456078 0.4434315 ]
 [0.70320845 0.30710873 0.87452245 0.8189864  0.5960802  0.32889768
  0.4481026  0.6073353  0.3810078  0.32876122 0.72482073 0.43830878
  0.4857322  0.5673679 ]
 [0.69618136 0.6521945  0.5944942  0.67535067 0.69580114 0.24729621
  0.50290793 0.6687528  0.26332182 1.1424375  0.75934297 0.6065645
  0.6398396  0.5931247 ]
 [0.67513925 0.7322082  0.35859057 0.49666375 0.662572   0.25212154
  0.4834519  0.6210992  0.25424343 1.2840717  0.72748    0.6090212
  0.5960261  0.37950584]
 [0.6198406  0.7721067  0.23435906 0.3770225  0.65956163 0.22921655
  0.4809807  0.6186837  0.2273378  1.1842006  0.6367733  0.57840353
  0.48878998 0.25874925]
 [0.5118748  0.7907854  0.15956438 0.3116127  0.6594076  0.19104734
  0.48901218 0.6228246  0.2068471  1.0126468  0.5018986  0.5255382
  0.37755716 0.22459632]
 [0.42279705 0.7894778  0.11926091 0.27709958 0.6490388  0.17664596
  0.5029159  0.63154536 0.19696125 0.8405961  0.38977373 0.4686907
  0.28204766 0.21083307]
 [0.35924083 0.71468145 0.14065659 0.2797224  0.61743414 0.19007632
  0.5046738  0.62860775 0.18463641 0.52379864 0.3007696  0.3843399
  0.18689077 0.19903454]
 [0.32038873 0.63472176 0.19436361 0.2894796  0.549093   0.19644454
  0.5018219  0.61828244 0.1870907  0.3540437  0.258721   0.29420617
  0.13078776 0.21671057]
 [0.33495685 0.4855266  0.29452112 0.33079427 0.4304129  0.17537221
  0.4900859  0.5343701  0.21999463 0.23198503 0.28050727 0.21128872
  0.15635365 0.18049666]
 [0.34082785 0.3454786  0.49977675 0.3702579  0.32099098 0.15567344
  0.4341554  0.44614038 0.21571517 0.1773772  0.3276972  0.15301944
  0.19571373 0.18695088]
 [0.40246522 0.28887278 0.6243981  0.4199055  0.27862114 0.18031576
  0.41470736 0.3411482  0.22829449 0.18335666 0.34370413 0.14596397
  0.2497876  0.22189245]
 [0.4566054  0.25510868 0.5705383  0.44889456 0.23618542 0.20832583
  0.39163613 0.24366781 0.27113593 0.2140058  0.3319951  0.14993447
  0.28977302 0.22420385]
 [0.48689583 0.23831871 0.41636428 0.43230173 0.21738914 0.24855193
  0.3683576  0.18729715 0.32154348 0.25169995 0.3217017  0.1543892
  0.30228788 0.21225518]
 [0.48266956 0.2297563  0.34925637 0.4180458  0.2195115  0.25206167
  0.34322953 0.20167348 0.32099923 0.2608196  0.33416077 0.15142736
  0.2894217  0.21174955]
 [0.468565   0.22923696 0.30842346 0.41409662 0.21635841 0.26323342
  0.31829995 0.22125426 0.31001338 0.2680095  0.35096094 0.14672735
  0.2703685  0.20906743]
 [0.46985838 0.2609395  0.32345757 0.38624734 0.28545785 0.28022698
  0.30700177 0.25522268 0.28608957 0.25380942 0.34326956 0.17864312
  0.25107422 0.2048127 ]
 [0.4549667  0.24582618 0.30970582 0.4086666  0.26676148 0.26007777
  0.28616416 0.26100886 0.28540063 0.23665252 0.34443483 0.16537106
  0.24810866 0.21270448]
 [0.4640279  0.24424067 0.29289764 0.421355   0.25999326 0.25915453
  0.27051333 0.26870888 0.28397685 0.23924838 0.36961687 0.16772112
  0.24464816 0.20030025]
 [0.4814707  0.22381428 0.31345093 0.4432757  0.24493329 0.21971089
  0.25243157 0.30614132 0.28696036 0.22745302 0.4261916  0.1864475
  0.26310676 0.1755889 ]
 [0.45673436 0.20453495 0.389308   0.404795   0.21027373 0.9001731
  0.16969025 0.30080372 0.31020632 0.2018034  0.44294012 0.23034899
  0.24630728 0.18238823]
 [0.3975145  0.18972716 0.44354418 0.37781775 0.17065792 1.2844917
  0.09334748 0.23724887 0.30889466 0.18538846 0.3769784  0.2440867
  0.23281124 0.18362093]
 [0.3926536  0.18341058 0.45104188 0.42448357 0.1458988  1.1690452
  0.08036079 0.2051141  0.31663415 0.18998323 0.34079158 0.24116692
  0.25335786 0.21756974]
 [0.42214745 0.19394681 0.3627399  0.50418234 0.17179188 0.63451266
  0.14377455 0.2347936  0.34750494 0.206776   0.34066063 0.21455252
  0.26307696 0.27102017]
 [0.44703096 0.2041403  0.32078847 0.53745306 0.1863572  0.39095977
  0.17224455 0.2937519  0.3698737  0.20694396 0.3840662  0.19216627
  0.24912992 0.27164686]
 [0.43391213 0.1940514  0.32823107 0.508238   0.19477256 0.28695658
  0.17727181 0.28995147 0.33701605 0.19526543 0.37887093 0.1817019
  0.25357842 0.2508339 ]
 [0.43367353 0.19780976 0.30597678 0.49738538 0.20490304 0.24909422
  0.17717569 0.28892297 0.32934883 0.19937769 0.3742879  0.17855918
  0.2537638  0.25183412]
 [0.4389898  0.20888878 0.29510516 0.49453932 0.2278485  0.22681698
  0.19707543 0.31351578 0.31961474 0.19763097 0.3872151  0.18422629
  0.2560698  0.24665329]
 [0.43351847 0.21680412 0.3048862  0.48264685 0.23708607 0.19979826
  0.21001312 0.33041388 0.2932866  0.19502066 0.40005645 0.1700526
  0.24321601 0.24291083]
 [0.42857864 0.2197998  0.30576548 0.46689644 0.23973256 0.19138083
  0.22015816 0.33596408 0.2725697  0.19602941 0.40916613 0.15786369
  0.2356678  0.23707512]
 [0.42707372 0.22455695 0.31289288 0.459703   0.24882214 0.1932717
  0.22829197 0.33799058 0.26486772 0.19815941 0.41080067 0.15645775
  0.23698765 0.2354854 ]
 [0.43182388 0.23239195 0.32160416 0.46191633 0.2606783  0.19490764
  0.25277054 0.35956258 0.25991708 0.19897076 0.42761156 0.15521489
  0.23824838 0.24268734]
 [0.46932253 0.35770854 0.31344157 0.45207044 0.34775773 0.18301514
  0.32739997 0.41225186 0.24982801 0.43261072 0.4630614  0.2181328
  0.3009839  0.2782249 ]
 [0.49235272 0.40489256 0.29739818 0.42993185 0.3714574  0.17965604
  0.34601945 0.42026842 0.24906981 0.5519925  0.4959882  0.25514117
  0.32345957 0.23974916]
 [0.52841955 0.378909   0.3012389  0.437404   0.3792601  0.2107985
  0.36463088 0.414842   0.28414723 0.44680244 0.5017711  0.23274085
  0.3011512  0.22701314]
 [0.59164184 0.35239267 0.29698405 0.5274154  0.41348538 0.27143314
  0.407319   0.48834434 0.40812266 0.36384854 0.5549347  0.2592102
  0.29127264 0.22128755]
 [0.598891   0.3074932  0.3171562  0.52209777 0.40908125 0.32864055
  0.4111147  0.48059854 0.4180945  0.2894522  0.55618805 0.27128965
  0.29760715 0.18627964]
 [0.64668    0.27727786 0.33145192 0.5285174  0.42363876 0.3092351
  0.39633852 0.42172098 0.47874877 0.29194388 0.530685   0.26411936
  0.30583176 0.22125337]
 [0.66917884 0.276015   0.37647098 0.53120023 0.43575126 0.28762138
  0.37777972 0.39455092 0.49301657 0.30950832 0.5412583  0.27571514
  0.33219987 0.2460585 ]
 [0.683574   0.2852241  0.41269904 0.6041905  0.4551843  0.2838425
  0.41008496 0.46682003 0.47938174 0.29527542 0.57987213 0.2745328
  0.33145538 0.33238268]
 [0.7008343  0.28396437 0.43859577 0.58776975 0.4779542  0.29468587
  0.4138682  0.4658802  0.452302   0.30349943 0.5773155  0.26610443
  0.320571   0.3864591 ]
 [0.7635832  0.34745568 0.4822629  0.56520694 0.54386914 0.27861318
  0.43278128 0.47947246 0.44632795 0.4638429  0.6183464  0.29382622
  0.32502416 0.42149034]
 [0.76216084 0.29143608 0.8056821  0.5175973  0.5991694  0.28378296
  0.39338166 0.4393738  0.35962632 0.37189957 0.6349934  0.29135057
  0.2952408  0.43591282]
 [0.7189191  0.2385731  0.8155403  0.48605132 0.57131934 0.38964182
  0.2985353  0.433252   0.37253174 0.34845865 0.7166816  0.36642873
  0.30548918 0.25580388]
 [0.6535771  0.20629606 0.92217004 0.3484136  0.59972024 1.2895395
  0.17150243 0.41309482 0.30996782 0.2920156  0.67663485 0.5212497
  0.33590293 0.10925868]
 [0.68065417 0.2310636  0.97975594 0.4467863  0.656724   1.5536844
  0.14527166 0.44830707 0.3527892  0.30293536 0.67748046 0.71628076
  0.46173108 0.04552547]
 [0.6776097  0.17471543 0.51762897 1.2822729  0.6563003  1.0453311
  0.23886412 0.51930207 1.1634109  0.252583   0.5616701  0.80832815
  0.6015737  0.24830446]
 [0.73933035 0.25209004 0.37556323 1.5392334  0.66416705 0.4781949
  0.4152698  0.7076475  1.2794194  0.23557997 0.60987216 0.7262202
  0.6074862  0.47358045]] (3350, 48, 14)
23/23 [==============================] - 0s 7ms/step
[[0.56409895 0.2578358  0.32883894 0.5898306  0.38169518 0.28509054
  0.34540683 0.45840058 0.38588288 0.24216494 0.5587091  0.2049569
  0.29529005 0.372321  ]
 [0.6352585  0.28057268 0.35666507 0.62865084 0.42470032 0.2990809
  0.38455284 0.5138817  0.4286031  0.2594877  0.6425971  0.22203094
  0.3244911  0.41533297]
 [0.662783   0.292737   0.3716751  0.62479943 0.43781382 0.31416023
  0.4070227  0.5264366  0.41601518 0.26902637 0.6522712  0.21482222
  0.332885   0.44845265]
 [0.71560115 0.28178653 0.37978408 0.63588154 0.45699894 0.36131087
  0.44258732 0.5435213  0.46683297 0.2692957  0.66786677 0.21368968
  0.33929893 0.49152532]
 [0.7041934  0.27489862 0.38291046 0.60373956 0.4441811  0.36671013
  0.42997766 0.52480185 0.43620485 0.26756027 0.65721047 0.20678829
  0.33166066 0.47718534]
 [0.7225837  0.2770363  0.39885908 0.58913904 0.44784188 0.37799698
  0.41577423 0.5154618  0.42816707 0.2872251  0.671547   0.21004759
  0.33279788 0.4648998 ]
 [0.74136263 0.24926597 0.8543288  0.55606335 0.5816283  0.42526105
  0.36261433 0.51004    0.31693846 0.31142566 0.7547353  0.34696457
  0.38843814 0.46562558]
 [0.7499554  0.23598716 1.0576385  0.5288696  0.6357887  0.48571897
  0.32471913 0.4857873  0.23875305 0.32597932 0.77686703 0.43547055
  0.42386207 0.41136914]
 [0.8043828  0.23048645 1.1913579  0.40270212 0.7680514  1.3231205
  0.2615885  0.5127897  0.19078091 0.34815538 0.8437873  0.68391937
  0.50271535 0.2738703 ]
 [0.790012   0.23037654 0.9467267  0.54992527 0.7218956  1.2168556
  0.31154227 0.53742886 0.36012653 0.3202623  0.79697347 0.6985867
  0.5265366  0.25519574]
 [0.77809626 0.22618629 0.51173145 1.2365683  0.7186017  0.68185145
  0.4539336  0.68228626 1.0298446  0.25518414 0.71733    0.68495905
  0.5780924  0.46571597]
 [0.8309525  0.29750398 0.6705308  1.3657669  0.7349459  0.45310417
  0.56506544 0.8178068  1.0075363  0.25117803 0.789006   0.63811356
  0.6069369  0.65116155]
 [0.7782815  0.4272884  1.0721956  1.1467184  0.7507262  0.3095719
  0.5790522  0.8143689  0.5339324  0.35310367 0.815797   0.60335827
  0.6141986  0.74692535]
 [0.7107221  0.4950737  1.1458867  0.9467935  0.724437   0.2564875
  0.56458384 0.74636143 0.3164897  0.47837144 0.7680678  0.561955
  0.60929763 0.7472129 ]
 [0.6863932  0.7141673  0.82497674 0.6920914  0.745777   0.2442205
  0.550089   0.6824409  0.19406444 1.1781827  0.74517846 0.6502359
  0.70595497 0.702364  ]
 [0.62512666 0.80479735 0.4838321  0.51238275 0.700253   0.21291465
  0.5250114  0.60853726 0.20169151 1.3739108  0.62684035 0.59464335
  0.6479694  0.6243176 ]
 [0.57479537 0.87022656 0.33277658 0.43407255 0.7071675  0.19524615
  0.5218822  0.59881896 0.20479718 1.3934112  0.52366626 0.5641228
  0.58471906 0.62475145]
 [0.5286997  0.88339156 0.3319274  0.36002856 0.72725725 0.1894663
  0.512743   0.59311193 0.17115566 1.2703215  0.4314003  0.5163042
  0.4890361  0.638183  ]
 [0.45279002 0.80209816 0.31528652 0.37164187 0.6927575  0.179589
  0.528729   0.61769134 0.19450156 0.7852811  0.3154326  0.36153758
  0.30335134 0.6282423 ]
 [0.4232176  0.6958525  0.44878095 0.4002308  0.615247   0.18526888
  0.51613086 0.5928488  0.20318978 0.5094127  0.27183327 0.23514144
  0.20572174 0.6200258 ]
 [0.40776455 0.5956231  0.6922878  0.3997527  0.5754018  0.18773997
  0.47625393 0.53914106 0.16270763 0.36169514 0.266785   0.18443699
  0.17569181 0.59834725]
 [0.40125966 0.4216593  0.99959266 0.43103153 0.48408672 0.16565742
  0.40922475 0.41675547 0.14126483 0.2561422  0.31536102 0.14405502
  0.21940157 0.5388268 ]
 [0.3868288  0.3110327  1.1054181  0.43813217 0.40360576 0.1619079
  0.34490258 0.31238857 0.14094085 0.22236899 0.33100986 0.12747805
  0.24987257 0.4685867 ]
 [0.3913003  0.23591498 1.0601128  0.40880933 0.35163385 0.28051618
  0.26779315 0.21507755 0.15105733 0.22323564 0.33849928 0.15403442
  0.2834514  0.37120122]
 [0.4195373  0.21139583 0.7858685  0.4321905  0.29023635 0.23722115
  0.26739323 0.18543357 0.21561942 0.24732833 0.35261598 0.14495102
  0.29033455 0.32265377]
 [0.43534958 0.19494289 0.55130816 0.43095243 0.24257086 0.26227075
  0.24011433 0.18076485 0.2545753  0.26222283 0.3650776  0.13370416
  0.26865295 0.2878667 ]
 [0.4650441  0.18874484 0.4717109  0.4335467  0.24913226 0.24807706
  0.23027904 0.17937447 0.31362808 0.26978594 0.3549232  0.14959313
  0.2797337  0.26948777]
 [0.46775982 0.19195265 0.4239687  0.4358211  0.25246644 0.23251277
  0.2213281  0.19961622 0.32769778 0.26160243 0.3575023  0.15459158
  0.27399224 0.25878206]
 [0.4705482  0.19798721 0.3663358  0.44328967 0.23756362 0.22835699
  0.20561424 0.22882563 0.33057454 0.25770918 0.38050175 0.14696172
  0.25143722 0.2529988 ]
 [0.44686335 0.19830146 0.35401848 0.4428883  0.22697201 0.22648385
  0.19655591 0.24840316 0.30574375 0.23745143 0.38556176 0.13921405
  0.23472014 0.24949065]
 [0.4427625  0.1927495  0.3517531  0.45609623 0.2218114  0.2304018
  0.18994786 0.27163026 0.30775768 0.22454229 0.4112057  0.14110394
  0.23133206 0.251962  ]
 [0.44270965 0.19178918 0.32960784 0.46261862 0.21810661 0.24963492
  0.1968312  0.28629684 0.31947443 0.21741353 0.42000744 0.13981079
  0.22716385 0.26193807]
 [0.42408767 0.16567239 0.41168323 0.41202897 0.19977155 0.9026675
  0.14747925 0.25539523 0.35679108 0.1880399  0.40083376 0.18083696
  0.21566135 0.2855496 ]
 [0.3887165  0.14808257 0.51768744 0.3633032  0.21541496 1.244153
  0.08622982 0.21813783 0.36607283 0.16462584 0.368712   0.23428437
  0.21689865 0.26480734]
 [0.3984347  0.16087374 0.49238724 0.4805084  0.22278327 0.61539
  0.15119188 0.26021406 0.37990415 0.15954977 0.38282877 0.21066281
  0.25080934 0.28271997]
 [0.41216192 0.1678504  0.41411936 0.5025503  0.21910015 0.40150255
  0.15825939 0.2796032  0.3645422  0.17148925 0.4008657  0.1925721
  0.2474029  0.2618661 ]
 [0.42037797 0.17038885 0.36200356 0.49618202 0.2171803  0.302328
  0.16053449 0.28371432 0.3411743  0.18002588 0.4037664  0.1781742
  0.2437303  0.24615493]
 [0.41598508 0.1775002  0.3305019  0.48139298 0.21756685 0.26371866
  0.16556445 0.2869547  0.31914377 0.18326882 0.39925653 0.16442043
  0.23534456 0.24343291]
 [0.42885122 0.18242559 0.30432463 0.48667833 0.21306707 0.24396142
  0.17020616 0.30908728 0.3127854  0.18575157 0.42474064 0.15374714
  0.2265141  0.24823612]
 [0.4473227  0.18824862 0.28744552 0.49759895 0.21353105 0.24022073
  0.17429864 0.32901844 0.31627473 0.19005069 0.45308053 0.15046702
  0.22172625 0.2539813 ]
 [0.458381   0.19158268 0.274737   0.5031602  0.2129067  0.24071357
  0.1771835  0.34330168 0.31980398 0.19169056 0.4737941  0.1476534
  0.21868636 0.2599992 ]
 [0.46622613 0.19858876 0.2766513  0.50388914 0.22896095 0.21649781
  0.1875548  0.358158   0.31582463 0.193727   0.4794336  0.15047976
  0.22443968 0.2633064 ]
 [0.469514   0.20620447 0.28229436 0.5013245  0.23910399 0.22417875
  0.20481327 0.36701518 0.3118535  0.19521354 0.48438847 0.15215062
  0.23169327 0.27092358]
 [0.47874826 0.21688873 0.29294622 0.50613946 0.2521407  0.24075198
  0.23694043 0.3881419  0.30971587 0.19621243 0.5005844  0.14983316
  0.23742598 0.29628965]
 [0.53771263 0.2363415  0.3187724  0.55817544 0.27710468 0.2770973
  0.3056645  0.44689235 0.35013878 0.20872387 0.5788934  0.16196677
  0.2645538  0.34768158]
 [0.5378513  0.2421836  0.32706133 0.5500109  0.2915113  0.26462716
  0.31948215 0.45461598 0.33563763 0.21354087 0.5751206  0.1611719
  0.264549   0.35157037]
 [0.57034737 0.23903605 0.33629373 0.5821075  0.32598734 0.28844166
  0.3526805  0.47313777 0.38806674 0.22752023 0.58323145 0.18004794
  0.28303975 0.38060609]
 [0.7191599  0.25945815 0.36476967 0.7313634  0.45507002 0.34657016
  0.44721508 0.56292546 0.5650666  0.27853933 0.72034854 0.24734472
  0.37353313 0.4883526 ]] (730, 48, 14)
In [ ]:
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

testMAE = np.mean(mae(testY[:,1,:], testingtest_C4[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.26 RMSE
Test Score: 0.19 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest_C4[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_C4[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
df_W = df_W.groupby(['Datetime']).mean()
df_W.head()
Out[ ]:
1 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 ... 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300
Datetime
2012-07-01 00:00:00 0.855 0.965 0.084 0.108 0.260 0.087 0.208 0.025 0.020 0.567 0.241 0.234 0.191 0.066 0.355 0.122 0.184 0.075 0.050 0.187 0.067 0.043 0.130 0.038 0.863 0.612 0.106 0.068 0.045 1.127 0.060 0.668 0.269 0.277 0.183 0.028 0.032 0.101 0.071 0.138 ... 0.584 0.490 0.783 0.061 0.100 0.000 0.761 0.082 0.725 0.252 1.050 0.151 0.055 0.251 0.131 0.081 0.081 0.060 0.094 0.791 1.096 0.313 0.055 0.075 0.048 0.147 0.788 0.520 0.726 0.091 0.221 0.075 0.183 0.134 0.075 0.030 0.141 0.157 0.541 0.219
2012-07-01 00:30:00 0.786 0.927 0.084 0.098 0.253 0.098 0.151 0.022 0.036 0.547 0.197 0.343 0.176 0.067 0.508 0.121 0.128 0.075 0.063 0.169 0.069 0.117 0.127 0.088 0.813 0.609 0.088 0.048 0.043 1.262 0.060 0.661 0.331 0.276 0.077 0.074 0.031 0.073 0.071 0.110 ... 0.136 0.205 0.676 0.058 0.075 0.006 1.390 0.107 0.675 0.216 0.996 0.241 0.053 0.159 0.106 0.125 0.091 0.056 0.063 0.787 0.627 0.106 0.042 0.081 0.042 0.130 0.738 0.177 0.733 0.110 0.231 0.075 0.177 0.174 0.084 0.053 0.257 0.127 0.100 0.099
2012-07-01 01:00:00 0.604 1.359 0.082 0.105 0.180 0.064 0.092 0.011 0.009 0.613 0.163 0.234 0.199 0.052 0.555 0.147 0.096 0.038 0.038 0.186 0.059 0.054 0.146 0.031 0.863 0.414 0.094 0.065 0.040 1.157 0.059 0.543 0.281 0.279 0.122 0.061 0.045 0.041 0.090 0.129 ... 0.108 0.200 0.683 0.122 0.087 0.094 1.450 0.090 0.701 0.185 1.013 0.181 0.092 0.298 0.119 0.154 0.093 0.097 0.106 1.057 0.691 0.119 0.026 0.116 0.790 0.203 0.675 0.439 0.750 0.065 0.247 0.063 0.193 0.165 0.054 0.044 0.197 0.122 0.090 0.134
2012-07-01 01:30:00 0.544 0.060 0.084 0.075 0.220 0.089 0.152 0.023 0.045 0.519 0.163 0.339 0.164 0.057 0.542 0.141 0.098 0.075 0.063 0.176 0.066 0.035 0.133 0.100 0.838 0.264 0.106 0.074 0.048 0.232 0.053 0.276 0.169 0.305 0.025 0.027 0.024 0.040 0.059 0.029 ... 0.103 0.177 0.666 0.137 0.075 0.063 1.841 0.077 0.625 0.169 0.324 0.154 0.055 0.267 0.119 0.188 0.093 0.157 0.081 0.833 0.654 0.088 0.074 0.083 1.146 0.120 0.175 0.041 0.211 0.064 0.193 0.444 0.185 0.104 0.062 0.029 0.273 0.120 0.094 0.100
2012-07-01 02:00:00 0.597 0.059 0.086 0.102 0.171 0.067 0.083 0.024 0.099 0.314 0.207 0.330 0.190 0.066 0.571 0.219 0.097 0.069 0.063 0.172 0.070 0.104 0.133 0.025 0.838 0.215 0.100 0.080 0.033 0.214 0.476 0.285 0.175 0.285 0.087 0.042 0.051 0.040 0.100 0.052 ... 0.121 0.173 0.668 0.089 0.094 0.006 1.074 0.024 0.651 0.157 0.170 0.154 0.050 0.199 0.119 0.135 0.073 0.065 0.106 0.838 0.204 0.137 0.026 0.095 1.049 0.165 0.188 0.039 0.159 0.198 0.141 0.081 0.176 0.130 0.074 0.044 0.206 0.106 0.046 0.123

5 rows × 299 columns

In [ ]:
################
#df_W = df_W.transpose()
df_W.shape
Out[ ]:
(4416, 299)
In [ ]:
df_W = df_W.values
#cap = np.percentile(X_A_C1, 97)   
#X_A_C1[X_A_C1 > cap] = cap
In [ ]:
training_size=int(df_W.shape[0]*0.80)

test_size=(df_W.shape[0])-training_size

train,test=df_W[0:training_size],df_W[training_size:(df_W.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)
(3350, 48, 299) (3350, 48, 299)
(730, 48, 299) (730, 48, 299)
In [ ]:
                            ###Building a sequential network:
Model = models.Sequential()
Model.add(layers.Dense(900, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model.add(Dropout(0.5))
Model.add(BatchNormalization())
Model.add(layers.Dense(600, activation='relu'))
Model.add(Dropout(0.5))
Model.add(BatchNormalization())
Model.add((Dense(trainX.shape[2])))
Model.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model.summary()
Model: "sequential_9"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_19 (Dense)             (None, 48, 900)           270000    
_________________________________________________________________
dropout_10 (Dropout)         (None, 48, 900)           0         
_________________________________________________________________
batch_normalization_10 (Batc (None, 48, 900)           3600      
_________________________________________________________________
dense_20 (Dense)             (None, 48, 600)           540600    
_________________________________________________________________
dropout_11 (Dropout)         (None, 48, 600)           0         
_________________________________________________________________
batch_normalization_11 (Batc (None, 48, 600)           2400      
_________________________________________________________________
dense_21 (Dense)             (None, 48, 299)           179699    
=================================================================
Total params: 996,299
Trainable params: 993,299
Non-trainable params: 3,000
_________________________________________________________________
In [ ]:
model_train = Model.fit(trainX,trainY, epochs=30, validation_split = 0.10, batch_size=64)
Epoch 1/30
48/48 [==============================] - 2s 32ms/step - loss: 0.6441 - mae: 0.5463 - val_loss: 0.0770 - val_mae: 0.1452
Epoch 2/30
48/48 [==============================] - 1s 25ms/step - loss: 0.1627 - mae: 0.2276 - val_loss: 0.0620 - val_mae: 0.1402
Epoch 3/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0880 - mae: 0.1696 - val_loss: 0.0606 - val_mae: 0.1402
Epoch 4/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0662 - mae: 0.1486 - val_loss: 0.0597 - val_mae: 0.1390
Epoch 5/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0588 - mae: 0.1408 - val_loss: 0.0589 - val_mae: 0.1370
Epoch 6/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0555 - mae: 0.1375 - val_loss: 0.0577 - val_mae: 0.1337
Epoch 7/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0533 - mae: 0.1357 - val_loss: 0.0575 - val_mae: 0.1305
Epoch 8/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0514 - mae: 0.1336 - val_loss: 0.0574 - val_mae: 0.1299
Epoch 9/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0498 - mae: 0.1323 - val_loss: 0.0569 - val_mae: 0.1287
Epoch 10/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0484 - mae: 0.1308 - val_loss: 0.0566 - val_mae: 0.1275
Epoch 11/30
48/48 [==============================] - 1s 25ms/step - loss: 0.0472 - mae: 0.1297 - val_loss: 0.0559 - val_mae: 0.1271
Epoch 12/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0459 - mae: 0.1283 - val_loss: 0.0553 - val_mae: 0.1254
Epoch 13/30
48/48 [==============================] - 1s 25ms/step - loss: 0.0449 - mae: 0.1273 - val_loss: 0.0551 - val_mae: 0.1241
Epoch 14/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0439 - mae: 0.1263 - val_loss: 0.0542 - val_mae: 0.1234
Epoch 15/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0431 - mae: 0.1257 - val_loss: 0.0546 - val_mae: 0.1239
Epoch 16/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0422 - mae: 0.1247 - val_loss: 0.0541 - val_mae: 0.1222
Epoch 17/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0414 - mae: 0.1238 - val_loss: 0.0538 - val_mae: 0.1217
Epoch 18/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0407 - mae: 0.1231 - val_loss: 0.0534 - val_mae: 0.1215
Epoch 19/30
48/48 [==============================] - 1s 25ms/step - loss: 0.0402 - mae: 0.1225 - val_loss: 0.0533 - val_mae: 0.1215
Epoch 20/30
48/48 [==============================] - 1s 25ms/step - loss: 0.0396 - mae: 0.1219 - val_loss: 0.0537 - val_mae: 0.1213
Epoch 21/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0390 - mae: 0.1211 - val_loss: 0.0538 - val_mae: 0.1223
Epoch 22/30
48/48 [==============================] - 1s 25ms/step - loss: 0.0385 - mae: 0.1208 - val_loss: 0.0533 - val_mae: 0.1226
Epoch 23/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0382 - mae: 0.1204 - val_loss: 0.0536 - val_mae: 0.1214
Epoch 24/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0377 - mae: 0.1197 - val_loss: 0.0533 - val_mae: 0.1216
Epoch 25/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0372 - mae: 0.1193 - val_loss: 0.0535 - val_mae: 0.1210
Epoch 26/30
48/48 [==============================] - 1s 27ms/step - loss: 0.0370 - mae: 0.1190 - val_loss: 0.0532 - val_mae: 0.1212
Epoch 27/30
48/48 [==============================] - 1s 27ms/step - loss: 0.0367 - mae: 0.1189 - val_loss: 0.0536 - val_mae: 0.1213
Epoch 28/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0364 - mae: 0.1183 - val_loss: 0.0538 - val_mae: 0.1221
Epoch 29/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0361 - mae: 0.1179 - val_loss: 0.0534 - val_mae: 0.1211
Epoch 30/30
48/48 [==============================] - 1s 26ms/step - loss: 0.0358 - mae: 0.1178 - val_loss: 0.0529 - val_mae: 0.1217
In [ ]:
Seq_train = Model.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
105/105 [==============================] - 1s 7ms/step
[[0.23346883 0.09652    0.12367184 ... 0.15443842 0.17112415 0.5347951 ]
 [0.25321    0.08868312 0.12241136 ... 0.13479313 0.14879185 0.349885  ]
 [0.23056145 0.07678014 0.11004689 ... 0.10139943 0.16898564 0.22574013]
 ...
 [0.4392503  0.07160397 0.14309683 ... 0.23357219 0.48910338 0.78794956]
 [0.22826858 0.0822634  0.11289629 ... 0.16578233 0.14601153 1.0552297 ]
 [0.1386768  0.0535927  0.08798277 ... 0.13558793 0.0761511  0.93278056]] (3350, 48, 299)
23/23 [==============================] - 0s 6ms/step
[[0.27661517 0.06579597 0.22626859 ... 0.27576035 0.40742815 0.8046579 ]
 [0.32646394 0.07177068 0.19654392 ... 0.2653237  0.3926024  0.53587985]
 [0.23228884 0.06601135 0.21222079 ... 0.28538632 0.43304026 0.459013  ]
 ...
 [0.23640735 0.0960467  0.2759302  ... 0.25786257 0.34316003 0.8445221 ]
 [0.2406078  0.07055688 0.29968977 ... 0.26246333 0.51363224 0.80006325]
 [0.25052583 0.06975615 0.26096183 ... 0.25570256 0.6128285  0.88463694]] (730, 48, 299)
In [ ]:
testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))

testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))
Test Score: 0.13 MAE
Test Score: 0.25 RMSE
In [ ]:
plt.imshow(testY[:48,47,:])
plt.show()
plt.imshow(Seq_test[:48,47,:])
plt.show()
In [ ]:
aa=[x for x in range(300)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:300,1,5], marker='.', label="actual")
plt.plot(aa, Seq_test[:300,1,5], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()
model.add(LSTM(300, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))

model.add(Dense(trainX.shape[2]))

model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = ['mae'], loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_4 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_10"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_4 (LSTM)                (None, 48, 300)           720000    
_________________________________________________________________
dense_22 (Dense)             (None, 48, 299)           89999     
=================================================================
Total params: 809,999
Trainable params: 809,999
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_W = model.fit(trainX,trainY, epochs=20, batch_size=64, validation_split = 0.10, callbacks=[lr_decay])
#, callbacks=[lr_decay]
Epoch 1/20
48/48 [==============================] - 7s 148ms/step - loss: 0.0780 - mae: 0.1713 - val_loss: 0.0626 - val_mae: 0.1463
Epoch 2/20
48/48 [==============================] - 7s 145ms/step - loss: 0.0576 - mae: 0.1429 - val_loss: 0.0585 - val_mae: 0.1378
Epoch 3/20
48/48 [==============================] - 7s 145ms/step - loss: 0.0528 - mae: 0.1361 - val_loss: 0.0573 - val_mae: 0.1352
Epoch 4/20
48/48 [==============================] - 7s 146ms/step - loss: 0.0501 - mae: 0.1327 - val_loss: 0.0569 - val_mae: 0.1355
Epoch 5/20
48/48 [==============================] - 7s 145ms/step - loss: 0.0481 - mae: 0.1303 - val_loss: 0.0568 - val_mae: 0.1355
Epoch 6/20
48/48 [==============================] - 7s 143ms/step - loss: 0.0466 - mae: 0.1286 - val_loss: 0.0568 - val_mae: 0.1364
Epoch 7/20
48/48 [==============================] - 7s 145ms/step - loss: 0.0454 - mae: 0.1273 - val_loss: 0.0568 - val_mae: 0.1365
Epoch 8/20
48/48 [==============================] - 7s 150ms/step - loss: 0.0446 - mae: 0.1264 - val_loss: 0.0570 - val_mae: 0.1369
Epoch 9/20
48/48 [==============================] - 7s 148ms/step - loss: 0.0439 - mae: 0.1256 - val_loss: 0.0572 - val_mae: 0.1374
Epoch 10/20
48/48 [==============================] - 7s 151ms/step - loss: 0.0434 - mae: 0.1251 - val_loss: 0.0574 - val_mae: 0.1386
Epoch 11/20
48/48 [==============================] - 7s 147ms/step - loss: 0.0430 - mae: 0.1247 - val_loss: 0.0574 - val_mae: 0.1383
Epoch 12/20
48/48 [==============================] - 7s 148ms/step - loss: 0.0427 - mae: 0.1243 - val_loss: 0.0575 - val_mae: 0.1386
Epoch 13/20
48/48 [==============================] - 7s 144ms/step - loss: 0.0424 - mae: 0.1240 - val_loss: 0.0576 - val_mae: 0.1386
Epoch 14/20
48/48 [==============================] - 7s 151ms/step - loss: 0.0422 - mae: 0.1238 - val_loss: 0.0576 - val_mae: 0.1390
Epoch 15/20
48/48 [==============================] - 7s 138ms/step - loss: 0.0420 - mae: 0.1236 - val_loss: 0.0576 - val_mae: 0.1387
Epoch 16/20
48/48 [==============================] - 7s 149ms/step - loss: 0.0418 - mae: 0.1234 - val_loss: 0.0577 - val_mae: 0.1391
Epoch 17/20
48/48 [==============================] - 7s 147ms/step - loss: 0.0417 - mae: 0.1233 - val_loss: 0.0578 - val_mae: 0.1392
Epoch 18/20
48/48 [==============================] - 7s 148ms/step - loss: 0.0416 - mae: 0.1232 - val_loss: 0.0579 - val_mae: 0.1395
Epoch 19/20
48/48 [==============================] - 7s 139ms/step - loss: 0.0416 - mae: 0.1231 - val_loss: 0.0578 - val_mae: 0.1393
Epoch 20/20
48/48 [==============================] - 7s 144ms/step - loss: 0.0415 - mae: 0.1231 - val_loss: 0.0578 - val_mae: 0.1394
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_W.history['loss'], label='train')
plt.plot(history_W.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain = model.predict(trainX, verbose = 1)
print(testingtrain[0], testingtrain.shape)

testingtest = model.predict(testX, verbose = 1)
print(testingtest[0], testingtest.shape)
105/105 [==============================] - 2s 16ms/step
[[ 3.4253487e-01  6.3908242e-02  4.3157905e-02 ...  6.8067379e-02
   4.9233444e-02  4.3851614e-01]
 [ 4.0054786e-01  7.4890114e-02  4.9673177e-02 ...  8.2694620e-02
  -4.4122338e-04  3.8137755e-01]
 [ 4.2512318e-01  7.3448099e-02  5.5879146e-02 ...  5.3426277e-02
  -2.6924912e-02  2.3933883e-01]
 ...
 [ 6.0410351e-01  4.1760448e-02  1.6500179e-01 ...  2.3181297e-01
   8.3820713e-01  6.6015804e-01]
 [ 4.1374156e-01  6.2047366e-02  1.5501682e-01 ...  2.1459053e-01
   5.2373546e-01  8.1059766e-01]
 [ 2.2784349e-01  7.3282987e-02  1.3825624e-01 ...  1.7542544e-01
   2.6215562e-01  7.9659307e-01]] (3350, 48, 299)
23/23 [==============================] - 0s 15ms/step
[[0.44392824 0.09437361 0.21481405 ... 0.15291992 0.5975416  0.87813693]
 [0.28706747 0.04396906 0.24432082 ... 0.23452382 0.47921377 0.7262068 ]
 [0.25116602 0.03403007 0.20086208 ... 0.3437292  0.4649534  0.6238597 ]
 ...
 [0.37855816 0.10486328 0.38132685 ... 0.3367432  0.46140957 0.84000546]
 [0.25984296 0.1843184  0.35138258 ... 0.28708786 0.71453494 0.94356936]
 [0.3275539  0.15282406 0.34597474 ... 0.25952098 0.89883757 1.0435729 ]] (730, 48, 299)
In [ ]:
trainScore = math.sqrt(mean_squared_error(trainY[:,1,:], testingtrain[:,1,:]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


trainMAE = np.mean(mae(trainY[:,1,:], testingtrain[:,1,:]))
print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Train Score: 0.22 RMSE
Test Score: 0.25 RMSE
Train Score: 0.13 MAE
Test Score: 0.14 MAE
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest[:48,1,:])
plt.show()
In [ ]:
aa=[x for x in range(300)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:300,1,1], marker='.', label="actual")
plt.plot(aa, testingtest[:300,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [1]:
df_A = df_A.groupby(['Datetime']).mean()
df_A.head()
In [ ]:
df_A = df_A.values
#cap = np.percentile(X_A_C1, 97)   
#X_A_C1[X_A_C1 > cap] = cap
In [ ]:
training_size=int(df_A.shape[0]*0.80)

test_size=(df_A.shape[0])-training_size

train,test= df_A[0:training_size],df_A[training_size:(df_A.shape[0])]

print(train.shape, test.shape)
(3532, 299) (884, 299)
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3388, 48, 299) (3388, 48, 299) 
 (740, 48, 299) (740, 48, 299)
In [ ]:
                             
###Building a sequential network:
Model = models.Sequential()
Model.add(layers.Dense(500, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model.add(Dropout(0.2))
Model.add(BatchNormalization())

Model.add(layers.Dense(300, activation='relu'))
Model.add(Dropout(0.2))
Model.add(BatchNormalization())

Model.add((Dense(trainX.shape[2])))
Model.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model.summary()
Model: "sequential_11"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_23 (Dense)             (None, 48, 500)           150000    
_________________________________________________________________
dropout_12 (Dropout)         (None, 48, 500)           0         
_________________________________________________________________
batch_normalization_12 (Batc (None, 48, 500)           2000      
_________________________________________________________________
dense_24 (Dense)             (None, 48, 300)           150300    
_________________________________________________________________
dropout_13 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_13 (Batc (None, 48, 300)           1200      
_________________________________________________________________
dense_25 (Dense)             (None, 48, 299)           89999     
=================================================================
Total params: 393,499
Trainable params: 391,899
Non-trainable params: 1,600
_________________________________________________________________
In [ ]:
model_train = Model.fit(trainX,trainY, epochs=20, validation_split = 0.10, batch_size = 64)
Epoch 1/20
48/48 [==============================] - 1s 26ms/step - loss: 0.4507 - mae: 0.4790 - val_loss: 0.0903 - val_mae: 0.1749
Epoch 2/20
48/48 [==============================] - 1s 19ms/step - loss: 0.1460 - mae: 0.2393 - val_loss: 0.0687 - val_mae: 0.1681
Epoch 3/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0972 - mae: 0.1866 - val_loss: 0.0669 - val_mae: 0.1577
Epoch 4/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0754 - mae: 0.1622 - val_loss: 0.0657 - val_mae: 0.1557
Epoch 5/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0635 - mae: 0.1481 - val_loss: 0.0647 - val_mae: 0.1514
Epoch 6/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0564 - mae: 0.1392 - val_loss: 0.0638 - val_mae: 0.1497
Epoch 7/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0520 - mae: 0.1334 - val_loss: 0.0628 - val_mae: 0.1500
Epoch 8/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0489 - mae: 0.1300 - val_loss: 0.0624 - val_mae: 0.1489
Epoch 9/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0464 - mae: 0.1270 - val_loss: 0.0614 - val_mae: 0.1479
Epoch 10/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0445 - mae: 0.1252 - val_loss: 0.0610 - val_mae: 0.1473
Epoch 11/20
48/48 [==============================] - 1s 18ms/step - loss: 0.0426 - mae: 0.1232 - val_loss: 0.0606 - val_mae: 0.1457
Epoch 12/20
48/48 [==============================] - 1s 18ms/step - loss: 0.0410 - mae: 0.1215 - val_loss: 0.0603 - val_mae: 0.1456
Epoch 13/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0396 - mae: 0.1203 - val_loss: 0.0599 - val_mae: 0.1448
Epoch 14/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0385 - mae: 0.1190 - val_loss: 0.0592 - val_mae: 0.1433
Epoch 15/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0373 - mae: 0.1178 - val_loss: 0.0594 - val_mae: 0.1430
Epoch 16/20
48/48 [==============================] - 1s 18ms/step - loss: 0.0364 - mae: 0.1168 - val_loss: 0.0593 - val_mae: 0.1429
Epoch 17/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0355 - mae: 0.1158 - val_loss: 0.0589 - val_mae: 0.1432
Epoch 18/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0347 - mae: 0.1150 - val_loss: 0.0588 - val_mae: 0.1427
Epoch 19/20
48/48 [==============================] - 1s 19ms/step - loss: 0.0340 - mae: 0.1142 - val_loss: 0.0594 - val_mae: 0.1439
Epoch 20/20
48/48 [==============================] - 1s 18ms/step - loss: 0.0334 - mae: 0.1135 - val_loss: 0.0594 - val_mae: 0.1441
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(model_train.history['loss'], label='train')
plt.plot(model_train.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
Seq_train = Model.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
106/106 [==============================] - 1s 6ms/step
[[ 0.07271196  0.05837108  0.12131897 ...  0.11215004  0.13645366
   0.8266016 ]
 [-0.01014934  0.05507302  0.10498718 ...  0.09370653 -0.08905828
   0.4107679 ]
 [ 0.14518206  0.06470376  0.09804516 ...  0.10678159  0.14493449
   0.42355123]
 ...
 [ 0.5469502   0.1353459   0.16609302 ...  0.17302877  0.32711798
   0.37530226]
 [ 0.6116637   0.10597423  0.15141018 ...  0.14807011  0.22631182
   0.45294324]
 [ 0.19796412  0.09119225  0.13233149 ...  0.1204965   0.15231454
   0.864897  ]] (3388, 48, 299)
24/24 [==============================] - 0s 6ms/step
[[0.26104185 0.07271684 0.18727519 ... 0.14996901 0.49759775 0.39994836]
 [0.26871043 0.07796756 0.22012407 ... 0.14379503 0.52914226 0.44470367]
 [0.20989901 0.06050642 0.26412594 ... 0.11161552 0.3675082  0.17051674]
 ...
 [0.20516586 0.07065181 0.13331214 ... 0.14318152 0.6363328  1.0577956 ]
 [0.23536102 0.07061865 0.19477522 ... 0.12423705 0.35711086 0.36616367]
 [0.22490692 0.08569301 0.21014598 ... 0.13811873 0.33946773 0.40137777]] (740, 48, 299)
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(Seq_test[:48,1,:])
plt.show()
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.36 RMSE
Test Score: 0.20 MAE
In [ ]:
aa=[x for x in range(300)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:300,1,1])
plt.plot(aa, Seq_test[:300,1,1],'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))



#model.add(LSTM(100,  activation='relu'))
model.add(Dense(trainX.shape[2]))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_5 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_12"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_5 (LSTM)                (None, 48, 200)           400000    
_________________________________________________________________
dense_26 (Dense)             (None, 48, 299)           60099     
=================================================================
Total params: 460,099
Trainable params: 460,099
Non-trainable params: 0
_________________________________________________________________
In [ ]:
##### fit model
history_A = model.fit(trainX, trainY, epochs=20, validation_split = 0.10, batch_size = 64, callbacks=[lr_decay])
#validation_data = (valX, valY)
Epoch 1/20
48/48 [==============================] - 7s 137ms/step - loss: 0.0816 - mae: 0.1741 - val_loss: 0.0710 - val_mae: 0.1658
Epoch 2/20
48/48 [==============================] - 7s 137ms/step - loss: 0.0590 - mae: 0.1438 - val_loss: 0.0650 - val_mae: 0.1559
Epoch 3/20
48/48 [==============================] - 6s 130ms/step - loss: 0.0534 - mae: 0.1363 - val_loss: 0.0627 - val_mae: 0.1516
Epoch 4/20
48/48 [==============================] - 6s 135ms/step - loss: 0.0506 - mae: 0.1326 - val_loss: 0.0619 - val_mae: 0.1520
Epoch 5/20
48/48 [==============================] - 6s 133ms/step - loss: 0.0488 - mae: 0.1304 - val_loss: 0.0616 - val_mae: 0.1522
Epoch 6/20
48/48 [==============================] - 7s 137ms/step - loss: 0.0475 - mae: 0.1289 - val_loss: 0.0613 - val_mae: 0.1512
Epoch 7/20
48/48 [==============================] - 6s 131ms/step - loss: 0.0466 - mae: 0.1278 - val_loss: 0.0611 - val_mae: 0.1515
Epoch 8/20
48/48 [==============================] - 6s 128ms/step - loss: 0.0458 - mae: 0.1270 - val_loss: 0.0613 - val_mae: 0.1517
Epoch 9/20
48/48 [==============================] - 6s 126ms/step - loss: 0.0452 - mae: 0.1263 - val_loss: 0.0615 - val_mae: 0.1535
Epoch 10/20
48/48 [==============================] - 7s 137ms/step - loss: 0.0447 - mae: 0.1258 - val_loss: 0.0613 - val_mae: 0.1532
Epoch 11/20
48/48 [==============================] - 6s 130ms/step - loss: 0.0444 - mae: 0.1254 - val_loss: 0.0614 - val_mae: 0.1530
Epoch 12/20
48/48 [==============================] - 6s 132ms/step - loss: 0.0441 - mae: 0.1251 - val_loss: 0.0613 - val_mae: 0.1526
Epoch 13/20
48/48 [==============================] - 6s 126ms/step - loss: 0.0438 - mae: 0.1248 - val_loss: 0.0615 - val_mae: 0.1532
Epoch 14/20
48/48 [==============================] - 6s 126ms/step - loss: 0.0436 - mae: 0.1246 - val_loss: 0.0615 - val_mae: 0.1533
Epoch 15/20
48/48 [==============================] - 6s 127ms/step - loss: 0.0435 - mae: 0.1244 - val_loss: 0.0614 - val_mae: 0.1532
Epoch 16/20
48/48 [==============================] - 7s 136ms/step - loss: 0.0434 - mae: 0.1243 - val_loss: 0.0614 - val_mae: 0.1530
Epoch 17/20
48/48 [==============================] - 6s 125ms/step - loss: 0.0433 - mae: 0.1242 - val_loss: 0.0616 - val_mae: 0.1533
Epoch 18/20
48/48 [==============================] - 6s 130ms/step - loss: 0.0432 - mae: 0.1241 - val_loss: 0.0615 - val_mae: 0.1530
Epoch 19/20
48/48 [==============================] - 6s 129ms/step - loss: 0.0431 - mae: 0.1240 - val_loss: 0.0615 - val_mae: 0.1531
Epoch 20/20
48/48 [==============================] - 6s 131ms/step - loss: 0.0431 - mae: 0.1239 - val_loss: 0.0616 - val_mae: 0.1534
In [ ]:
testingtrain = model.predict(trainX, verbose = 1)
print(testingtrain[0], testingtrain.shape)

testingtest = model.predict(testX, verbose = 1)
print(testingtest[0], testingtest.shape)
106/106 [==============================] - 2s 15ms/step
[[ 0.27850306  0.06787507  0.09765256 ...  0.04936545  0.35906217
   0.663935  ]
 [ 0.21163872  0.05494133 -0.00099223 ...  0.07515052  0.37083912
   0.57642436]
 [ 0.18881312  0.05595577  0.00302681 ...  0.07096143  0.25307286
   0.50167227]
 ...
 [ 0.6596868   0.09002638  0.17812411 ...  0.15936969  0.16002715
   0.5987005 ]
 [ 0.5462122   0.096172    0.18251029 ...  0.1863151   0.19167723
   0.6524329 ]
 [ 0.32407945  0.10768457  0.16258603 ...  0.2066173   0.3040643
   0.8061009 ]] (3388, 48, 299)
24/24 [==============================] - 0s 14ms/step
[[0.11035585 0.05132593 0.16246822 ... 0.17082095 0.5022152  0.55168235]
 [0.18572877 0.0715514  0.19525015 ... 0.18458517 0.49320716 0.5398158 ]
 [0.20012651 0.04613688 0.226009   ... 0.20827715 0.42734867 0.44182912]
 ...
 [0.04473718 0.12297882 0.2534483  ... 0.25416487 0.6027448  0.7193149 ]
 [0.04745527 0.0864066  0.27337664 ... 0.23792627 0.48375663 0.7118792 ]
 [0.0854774  0.10423328 0.26938054 ... 0.21532099 0.3803337  0.70741695]] (740, 48, 299)
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(testingtest[:48,1,:])
plt.show()
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))

testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.35 RMSE
Test Score: 0.21 MAE
In [ ]:
aa=[x for x in range(300)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:300,1,1], marker='.', label="actual")
plt.plot(aa, testingtest[:300,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
df_SU.head()
Out[ ]:
1 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 ... 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300
Datetime
2012-12-01 00:00:00 0.202 0.071 0.260 0.118 0.265 0.147 0.158 0.071 0.187 0.191 0.290 0.163 0.158 0.085 0.254 0.705 0.084 0.144 1.000 0.831 0.078 0.127 1.399 0.213 0.350 0.270 0.106 0.159 0.014 0.078 0.071 0.546 0.256 0.911 0.139 0.424 0.073 0.127 0.066 0.282 ... 0.199 0.268 0.194 0.055 0.131 0.581 1.069 0.182 1.163 0.224 0.617 0.110 0.047 0.268 0.131 0.165 0.110 0.167 0.113 0.273 0.793 0.131 0.062 0.083 0.138 0.266 0.250 0.210 0.276 0.208 0.401 0.094 0.240 0.231 0.199 0.064 0.268 0.116 0.225 0.919
2012-12-01 00:30:00 0.151 0.051 0.288 0.121 0.242 0.089 0.152 0.065 0.121 0.201 0.265 0.155 0.173 0.041 0.292 0.605 0.113 0.125 0.563 0.833 0.075 0.117 1.337 0.075 0.813 0.260 0.113 0.135 0.067 0.077 0.078 0.879 0.219 0.809 0.122 0.289 0.053 0.124 0.150 0.264 ... 0.399 0.324 0.170 0.046 0.144 0.044 0.883 0.148 1.164 0.231 0.371 0.087 0.103 0.296 0.113 0.145 0.130 0.186 0.125 0.180 0.981 0.088 0.051 0.123 0.112 0.289 0.263 0.073 0.720 0.191 0.431 0.056 0.238 0.295 0.237 0.050 0.309 0.115 0.065 0.770
2012-12-01 01:00:00 0.254 0.054 0.254 0.143 0.220 0.099 0.087 0.102 0.199 0.162 0.295 0.107 0.200 0.086 0.175 0.724 0.082 0.181 0.588 0.595 0.077 0.173 1.155 0.050 0.363 0.221 0.113 0.106 0.014 0.078 0.058 0.893 0.244 0.484 0.127 0.236 0.044 0.122 0.162 0.530 ... 1.280 0.275 0.168 0.044 0.151 0.056 0.346 0.173 1.263 0.219 0.673 0.093 0.130 0.285 0.175 0.133 0.209 0.182 0.094 0.179 1.196 0.094 0.073 0.077 0.103 0.263 0.250 0.070 0.718 0.160 0.404 0.094 0.211 0.173 0.160 0.049 0.265 0.120 0.150 0.280
2012-12-01 01:30:00 0.250 0.050 0.259 0.131 0.236 0.066 0.160 0.034 0.121 0.146 0.228 0.052 0.147 0.107 0.167 0.555 0.108 0.169 0.588 0.246 0.076 0.123 0.660 0.075 0.313 0.185 0.100 0.057 0.060 0.079 0.063 1.464 0.181 0.290 0.124 0.232 0.045 0.120 0.079 0.871 ... 0.924 0.266 0.167 0.054 0.157 0.063 0.323 0.159 1.175 0.212 0.290 0.101 0.069 0.256 0.131 0.122 0.169 0.162 0.125 0.156 1.144 0.156 0.041 0.128 0.097 0.366 0.375 0.454 0.796 0.148 0.319 0.050 0.537 0.266 0.173 0.060 0.264 0.128 0.134 0.209
2012-12-01 02:00:00 0.149 0.053 0.167 0.114 0.239 0.109 0.171 0.075 0.186 0.116 0.196 0.051 0.153 0.061 0.185 0.686 0.088 0.113 0.600 0.270 0.075 0.142 0.192 0.063 0.263 0.248 0.113 0.133 0.026 0.071 0.078 0.220 0.206 0.208 0.094 0.261 0.054 0.121 0.070 0.906 ... 0.364 0.272 0.166 0.042 0.181 0.544 0.264 0.107 1.189 0.220 0.253 0.099 0.056 0.312 0.119 0.091 0.186 0.144 0.113 0.156 1.059 0.113 0.082 0.069 0.099 0.314 0.175 0.076 0.735 0.165 0.282 0.088 0.837 0.260 0.134 0.060 0.292 0.105 0.201 0.214

5 rows × 299 columns

In [ ]:
df_SU = df_SU.values
#cap = np.percentile(X_A_C1, 97)   
#X_A_C1[X_A_C1 > cap] = cap
#print(X_A_C1.shape)
In [ ]:
training_size=int(df_SU.shape[0]*0.80)

test_size=(df_SU.shape[0])-training_size

train,test=df_SU[0:training_size],df_SU[training_size:(df_SU.shape[0])]

print(train.shape, test.shape)
(3456, 299) (864, 299)
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)

print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3312, 48, 299) (3312, 48, 299) 
 (720, 48, 299) (720, 48, 299)
In [ ]:
                             
###Building a sequential network:
Model = models.Sequential()
Model.add(layers.Dense(500, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model.add(Dropout(0.2))
Model.add(BatchNormalization())

Model.add(layers.Dense(300, activation='relu'))
Model.add(Dropout(0.2))
Model.add(BatchNormalization())

Model.add((Dense(trainX.shape[2])))
Model.compile(optimizer=  optimizers.Adam(lr=0.001), loss='mse', metrics=['mae'])
Model.summary()
Model: "sequential_13"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_27 (Dense)             (None, 48, 500)           150000    
_________________________________________________________________
dropout_14 (Dropout)         (None, 48, 500)           0         
_________________________________________________________________
batch_normalization_14 (Batc (None, 48, 500)           2000      
_________________________________________________________________
dense_28 (Dense)             (None, 48, 300)           150300    
_________________________________________________________________
dropout_15 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_15 (Batc (None, 48, 300)           1200      
_________________________________________________________________
dense_29 (Dense)             (None, 48, 299)           89999     
=================================================================
Total params: 393,499
Trainable params: 391,899
Non-trainable params: 1,600
_________________________________________________________________
In [ ]:
model_train = Model.fit(trainX,trainY, epochs=20, validation_split = 0.10, batch_size = 64)
Epoch 1/20
47/47 [==============================] - 1s 24ms/step - loss: 0.5274 - mae: 0.4953 - val_loss: 0.1292 - val_mae: 0.1954
Epoch 2/20
47/47 [==============================] - 1s 19ms/step - loss: 0.2108 - mae: 0.2803 - val_loss: 0.0878 - val_mae: 0.1734
Epoch 3/20
47/47 [==============================] - 1s 18ms/step - loss: 0.1452 - mae: 0.2237 - val_loss: 0.0839 - val_mae: 0.1566
Epoch 4/20
47/47 [==============================] - 1s 18ms/step - loss: 0.1154 - mae: 0.1943 - val_loss: 0.0838 - val_mae: 0.1530
Epoch 5/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0969 - mae: 0.1764 - val_loss: 0.0839 - val_mae: 0.1513
Epoch 6/20
47/47 [==============================] - 1s 17ms/step - loss: 0.0854 - mae: 0.1655 - val_loss: 0.0837 - val_mae: 0.1520
Epoch 7/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0773 - mae: 0.1576 - val_loss: 0.0832 - val_mae: 0.1562
Epoch 8/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0707 - mae: 0.1522 - val_loss: 0.0826 - val_mae: 0.1537
Epoch 9/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0659 - mae: 0.1481 - val_loss: 0.0822 - val_mae: 0.1511
Epoch 10/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0617 - mae: 0.1444 - val_loss: 0.0813 - val_mae: 0.1495
Epoch 11/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0586 - mae: 0.1422 - val_loss: 0.0807 - val_mae: 0.1503
Epoch 12/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0555 - mae: 0.1389 - val_loss: 0.0799 - val_mae: 0.1493
Epoch 13/20
47/47 [==============================] - 1s 17ms/step - loss: 0.0531 - mae: 0.1368 - val_loss: 0.0800 - val_mae: 0.1454
Epoch 14/20
47/47 [==============================] - 1s 17ms/step - loss: 0.0513 - mae: 0.1347 - val_loss: 0.0797 - val_mae: 0.1457
Epoch 15/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0497 - mae: 0.1336 - val_loss: 0.0793 - val_mae: 0.1451
Epoch 16/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0483 - mae: 0.1320 - val_loss: 0.0793 - val_mae: 0.1467
Epoch 17/20
47/47 [==============================] - 1s 18ms/step - loss: 0.0477 - mae: 0.1313 - val_loss: 0.0788 - val_mae: 0.1511
Epoch 18/20
47/47 [==============================] - 1s 17ms/step - loss: 0.0458 - mae: 0.1298 - val_loss: 0.0789 - val_mae: 0.1459
Epoch 19/20
47/47 [==============================] - 1s 17ms/step - loss: 0.0451 - mae: 0.1288 - val_loss: 0.0789 - val_mae: 0.1484
Epoch 20/20
47/47 [==============================] - 1s 17ms/step - loss: 0.0444 - mae: 0.1283 - val_loss: 0.0793 - val_mae: 0.1458
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(model_train.history['loss'], label='train')
plt.plot(model_train.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
Seq_train = Model.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
104/104 [==============================] - 1s 6ms/step
[[0.17721424 0.07397974 0.14187917 ... 0.16814977 0.39157006 0.6807956 ]
 [0.14635979 0.05378859 0.12643155 ... 0.15390122 0.25317478 0.5529491 ]
 [0.18477136 0.03786214 0.126268   ... 0.13199787 0.30294138 0.32265604]
 ...
 [0.6044416  0.12086585 0.13508864 ... 0.17101553 0.35330683 0.44291586]
 [0.5781594  0.08579174 0.11085396 ... 0.14481243 0.33272552 0.5447961 ]
 [0.1595138  0.09353478 0.12671545 ... 0.13798696 0.24246071 0.9358975 ]] (3312, 48, 299)
23/23 [==============================] - 0s 6ms/step
[[0.226872   0.07255141 0.12176927 ... 0.12696436 0.11265398 0.6762769 ]
 [0.16071342 0.0842979  0.12596038 ... 0.12476621 0.33457285 0.6074401 ]
 [0.20028889 0.06540545 0.1191463  ... 0.11233733 0.34991136 0.29219484]
 ...
 [0.6562116  0.10433976 0.12027942 ... 0.1690684  0.39147472 0.36376047]
 [0.82566327 0.09078115 0.17358108 ... 0.16241798 0.5328079  0.5013695 ]
 [0.19735801 0.08239163 0.11522762 ... 0.14036688 0.38001797 0.8275812 ]] (720, 48, 299)
In [ ]:
plt.imshow(testY[:48,1,:])
plt.show()
plt.imshow(Seq_test[:48,1,:])
plt.show()
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.26 RMSE
Test Score: 0.14 MAE
In [ ]:
aa=[x for x in range(300)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:300,1,1])
plt.plot(aa, Seq_test[:300,1,1],'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))



#model.add(LSTM(100,  activation='relu'))
model.add(Dense(trainX.shape[2]))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_6 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_14"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_6 (LSTM)                (None, 48, 200)           400000    
_________________________________________________________________
dense_30 (Dense)             (None, 48, 299)           60099     
=================================================================
Total params: 460,099
Trainable params: 460,099
Non-trainable params: 0
_________________________________________________________________
In [ ]:
##### fit model
history_SU = model.fit(trainX, trainY, epochs=20, validation_split = 0.10, batch_size = 64, callbacks=[lr_decay])
#validation_data = (valX, valY)
Epoch 1/20
47/47 [==============================] - 6s 138ms/step - loss: 0.1469 - mae: 0.2183 - val_loss: 0.0907 - val_mae: 0.1749
Epoch 2/20
47/47 [==============================] - 6s 126ms/step - loss: 0.1040 - mae: 0.1822 - val_loss: 0.0865 - val_mae: 0.1635
Epoch 3/20
47/47 [==============================] - 6s 123ms/step - loss: 0.0921 - mae: 0.1711 - val_loss: 0.0853 - val_mae: 0.1626
Epoch 4/20
47/47 [==============================] - 6s 128ms/step - loss: 0.0852 - mae: 0.1646 - val_loss: 0.0848 - val_mae: 0.1584
Epoch 5/20
47/47 [==============================] - 6s 132ms/step - loss: 0.0808 - mae: 0.1605 - val_loss: 0.0847 - val_mae: 0.1544
Epoch 6/20
47/47 [==============================] - 6s 129ms/step - loss: 0.0778 - mae: 0.1574 - val_loss: 0.0838 - val_mae: 0.1554
Epoch 7/20
47/47 [==============================] - 6s 128ms/step - loss: 0.0754 - mae: 0.1554 - val_loss: 0.0836 - val_mae: 0.1542
Epoch 8/20
47/47 [==============================] - 6s 132ms/step - loss: 0.0737 - mae: 0.1538 - val_loss: 0.0833 - val_mae: 0.1544
Epoch 9/20
47/47 [==============================] - 7s 139ms/step - loss: 0.0723 - mae: 0.1525 - val_loss: 0.0832 - val_mae: 0.1535
Epoch 10/20
47/47 [==============================] - 6s 138ms/step - loss: 0.0713 - mae: 0.1515 - val_loss: 0.0830 - val_mae: 0.1533
Epoch 11/20
47/47 [==============================] - 6s 135ms/step - loss: 0.0704 - mae: 0.1507 - val_loss: 0.0828 - val_mae: 0.1533
Epoch 12/20
47/47 [==============================] - 7s 140ms/step - loss: 0.0698 - mae: 0.1501 - val_loss: 0.0829 - val_mae: 0.1528
Epoch 13/20
47/47 [==============================] - 6s 135ms/step - loss: 0.0692 - mae: 0.1496 - val_loss: 0.0828 - val_mae: 0.1530
Epoch 14/20
47/47 [==============================] - 6s 134ms/step - loss: 0.0688 - mae: 0.1492 - val_loss: 0.0828 - val_mae: 0.1528
Epoch 15/20
47/47 [==============================] - 6s 135ms/step - loss: 0.0684 - mae: 0.1489 - val_loss: 0.0828 - val_mae: 0.1531
Epoch 16/20
47/47 [==============================] - 6s 133ms/step - loss: 0.0682 - mae: 0.1486 - val_loss: 0.0828 - val_mae: 0.1531
Epoch 17/20
47/47 [==============================] - 6s 134ms/step - loss: 0.0679 - mae: 0.1484 - val_loss: 0.0828 - val_mae: 0.1530
Epoch 18/20
47/47 [==============================] - 6s 132ms/step - loss: 0.0678 - mae: 0.1483 - val_loss: 0.0829 - val_mae: 0.1529
Epoch 19/20
47/47 [==============================] - 6s 132ms/step - loss: 0.0676 - mae: 0.1482 - val_loss: 0.0827 - val_mae: 0.1531
Epoch 20/20
47/47 [==============================] - 6s 128ms/step - loss: 0.0675 - mae: 0.1480 - val_loss: 0.0828 - val_mae: 0.1528
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_SU.history['loss'], label='train')
plt.plot(history_SU.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_SU = model.predict(trainX, verbose = 1)
print(testingtrain_SU[0], testingtrain_SU.shape)

testingtest_SU = model.predict(testX, verbose = 1)
print(testingtest_SU[0], testingtest_SU.shape)
104/104 [==============================] - 2s 15ms/step
[[ 0.1350978   0.06668247  0.12711756 ...  0.15255617  0.35154215
   0.44238165]
 [ 0.16467206  0.05466458  0.15642639 ...  0.13368778  0.2851371
   0.41974482]
 [ 0.23042527  0.0023226   0.17715245 ...  0.10252986  0.23672046
   0.33599192]
 ...
 [ 0.45084542  0.01546762  0.05909082 ...  0.19767682  0.53890073
   0.45956475]
 [ 0.4368661   0.01069344  0.03781924 ...  0.12591013  0.45250458
   0.49056855]
 [ 0.27195352 -0.03674473  0.08765201 ...  0.11069105  0.2957446
   0.5239913 ]] (3312, 48, 299)
23/23 [==============================] - 0s 14ms/step
[[0.13821599 0.0985333  0.12969466 ... 0.10446303 0.22942592 0.39705893]
 [0.15100072 0.13538986 0.18402839 ... 0.08619238 0.23867166 0.4810909 ]
 [0.19961798 0.14147508 0.21103604 ... 0.07174859 0.19994779 0.45463508]
 ...
 [0.53105754 0.04768424 0.05279155 ... 0.1593633  0.4306873  0.51191497]
 [0.51369774 0.06814996 0.07372666 ... 0.12273184 0.34894186 0.5357799 ]
 [0.3478568  0.08576268 0.07981621 ... 0.10247991 0.28222087 0.65768164]] (720, 48, 299)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))

testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_SU[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))


#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_SU[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.27 RMSE
Test Score: 0.16 MAE
In [ ]:
aa=[x for x in range(300)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:300,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_SU[:300,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
df_SP.head()
Out[ ]:
1 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 ... 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300
Datetime
2012-09-01 00:00:00 0.107 0.081 0.110 0.133 0.563 0.051 0.326 0.014 0.041 0.147 0.184 0.039 0.129 0.088 0.742 0.172 0.062 0.094 0.400 1.749 0.059 0.068 0.155 0.100 0.519 0.601 0.094 0.056 0.028 0.370 0.085 0.120 0.275 0.738 0.074 0.233 0.043 0.074 0.060 0.106 ... 0.184 0.189 1.005 0.031 0.219 0.019 0.061 0.071 0.600 0.151 0.324 0.123 0.093 0.527 0.131 0.109 0.071 0.135 0.094 0.121 0.719 0.175 0.111 0.122 0.339 0.186 1.038 0.041 0.664 0.124 0.244 0.031 0.179 0.212 0.068 0.028 0.258 0.240 0.146 0.524
2012-09-01 00:30:00 0.049 0.091 0.108 0.076 0.582 0.057 0.153 0.047 0.008 0.127 0.143 0.039 0.124 0.080 0.729 0.157 0.075 0.063 0.338 1.370 0.068 0.091 0.135 0.050 0.463 0.580 0.094 0.065 0.051 0.336 0.060 0.088 0.263 0.743 0.114 0.176 0.050 0.045 0.097 0.113 ... 0.176 0.199 1.022 0.057 0.157 0.038 0.051 0.114 0.601 0.161 0.236 0.145 0.058 0.447 0.100 0.168 0.094 0.179 0.075 0.112 0.647 0.162 0.115 0.131 0.340 0.165 0.938 0.026 0.677 0.101 0.270 0.063 0.108 0.283 0.073 0.041 0.219 0.201 0.107 0.350
2012-09-01 01:00:00 0.102 0.042 0.098 0.107 0.599 0.074 0.152 0.012 0.041 0.142 0.110 0.089 0.126 0.110 0.761 0.144 0.076 0.088 0.350 0.948 0.065 0.045 0.140 0.063 0.413 0.587 0.100 0.048 0.030 0.334 0.062 0.151 0.163 0.751 0.160 0.165 0.025 0.041 0.077 0.035 ... 0.164 0.170 0.919 0.035 0.094 0.013 0.083 0.054 0.663 0.149 0.557 0.123 0.101 0.411 0.163 0.098 0.074 0.109 0.081 0.127 0.668 0.150 0.097 0.157 0.344 0.185 1.000 0.026 0.652 0.064 0.181 0.400 0.144 0.169 0.094 0.047 0.156 0.101 0.058 0.154
2012-09-01 01:30:00 0.053 0.036 0.099 0.067 0.627 0.070 0.091 0.011 0.009 0.143 0.225 0.087 0.105 0.086 0.778 0.149 0.061 0.100 0.350 0.382 0.552 0.053 0.151 0.088 0.538 0.309 0.100 0.079 0.054 0.328 0.059 0.129 0.163 0.785 0.131 0.177 0.047 0.046 0.075 0.064 ... 0.172 0.175 0.867 0.063 0.094 0.013 0.054 0.056 0.588 0.152 0.178 0.142 0.071 0.432 0.119 0.114 0.095 0.111 0.069 0.131 0.729 0.106 0.111 0.122 0.336 0.218 0.950 0.042 0.083 0.108 0.163 0.031 0.157 0.222 0.063 0.018 0.092 0.098 0.107 0.112
2012-09-01 02:00:00 0.086 0.052 0.107 0.074 0.612 0.087 0.155 0.011 0.041 0.133 0.105 0.050 0.111 0.065 0.806 0.286 0.064 0.088 0.450 0.252 0.202 0.122 0.132 0.050 0.463 0.329 0.100 0.059 0.069 0.298 0.058 0.136 0.175 0.779 0.069 0.150 0.028 0.059 0.103 0.034 ... 0.155 0.175 0.986 0.031 0.094 0.388 0.067 0.132 0.626 0.158 0.642 0.131 0.053 0.603 0.131 0.208 0.081 0.032 0.094 0.121 0.124 0.088 0.212 0.150 0.321 0.174 0.675 0.011 0.102 0.096 0.107 0.025 0.118 0.209 0.085 0.054 0.220 0.082 0.086 0.149

5 rows × 299 columns

In [ ]:
df_SP = df_SP.values
#cap = np.percentile(X_A_C2, 97)   
#X_A_C2[X_A_C2 > cap] = cap
In [ ]:
training_size=int(df_SP.shape[0]*0.80)

test_size=(df_SP.shape[0])-training_size

train,test=df_SP[0:training_size],df_SP[training_size:(df_SP.shape[0])]
In [ ]:
trainX, trainY = get_batches(train, 48, 48, 48)
testX, testY = get_batches(test,  48, 48, 48)


print(trainX.shape, trainY.shape, '\n',
      testX.shape, testY.shape)
(3350, 48, 299) (3350, 48, 299) 
 (730, 48, 299) (730, 48, 299)
In [ ]:
                             ###Building a sequential network:
Model = models.Sequential()
Model.add(layers.Dense(300, activation='relu', input_shape=(trainX.shape[1],trainX.shape[2])))
Model.add(Dropout(.2))
Model.add(BatchNormalization())

Model.add(layers.Dense(100, activation='relu'))
Model.add(Dropout(.2))
Model.add(BatchNormalization())

Model.add((Dense(trainX.shape[2])))
Model.compile(optimizer=  'adam', loss='mse', metrics=['mae'])
Model.summary()
Model: "sequential_15"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_31 (Dense)             (None, 48, 300)           90000     
_________________________________________________________________
dropout_16 (Dropout)         (None, 48, 300)           0         
_________________________________________________________________
batch_normalization_16 (Batc (None, 48, 300)           1200      
_________________________________________________________________
dense_32 (Dense)             (None, 48, 100)           30100     
_________________________________________________________________
dropout_17 (Dropout)         (None, 48, 100)           0         
_________________________________________________________________
batch_normalization_17 (Batc (None, 48, 100)           400       
_________________________________________________________________
dense_33 (Dense)             (None, 48, 299)           30199     
=================================================================
Total params: 151,899
Trainable params: 151,099
Non-trainable params: 800
_________________________________________________________________
In [ ]:
model_train = Model.fit(trainX,trainY, epochs=20, validation_split = 0.10, batch_size = 64)
Epoch 1/20
48/48 [==============================] - 1s 20ms/step - loss: 0.3273 - mae: 0.4163 - val_loss: 0.1128 - val_mae: 0.2102
Epoch 2/20
48/48 [==============================] - 1s 15ms/step - loss: 0.1387 - mae: 0.2465 - val_loss: 0.0645 - val_mae: 0.1357
Epoch 3/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0894 - mae: 0.1896 - val_loss: 0.0597 - val_mae: 0.1377
Epoch 4/20
48/48 [==============================] - 1s 15ms/step - loss: 0.0739 - mae: 0.1682 - val_loss: 0.0588 - val_mae: 0.1348
Epoch 5/20
48/48 [==============================] - 1s 15ms/step - loss: 0.0653 - mae: 0.1550 - val_loss: 0.0579 - val_mae: 0.1332
Epoch 6/20
48/48 [==============================] - 1s 15ms/step - loss: 0.0603 - mae: 0.1466 - val_loss: 0.0573 - val_mae: 0.1314
Epoch 7/20
48/48 [==============================] - 1s 15ms/step - loss: 0.0570 - mae: 0.1411 - val_loss: 0.0567 - val_mae: 0.1299
Epoch 8/20
48/48 [==============================] - 1s 15ms/step - loss: 0.0547 - mae: 0.1375 - val_loss: 0.0563 - val_mae: 0.1283
Epoch 9/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0529 - mae: 0.1348 - val_loss: 0.0557 - val_mae: 0.1271
Epoch 10/20
48/48 [==============================] - 1s 15ms/step - loss: 0.0514 - mae: 0.1331 - val_loss: 0.0554 - val_mae: 0.1258
Epoch 11/20
48/48 [==============================] - 1s 15ms/step - loss: 0.0500 - mae: 0.1314 - val_loss: 0.0551 - val_mae: 0.1253
Epoch 12/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0489 - mae: 0.1303 - val_loss: 0.0548 - val_mae: 0.1247
Epoch 13/20
48/48 [==============================] - 1s 15ms/step - loss: 0.0479 - mae: 0.1294 - val_loss: 0.0543 - val_mae: 0.1246
Epoch 14/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0471 - mae: 0.1285 - val_loss: 0.0541 - val_mae: 0.1243
Epoch 15/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0463 - mae: 0.1277 - val_loss: 0.0537 - val_mae: 0.1241
Epoch 16/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0455 - mae: 0.1269 - val_loss: 0.0535 - val_mae: 0.1247
Epoch 17/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0449 - mae: 0.1265 - val_loss: 0.0535 - val_mae: 0.1244
Epoch 18/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0443 - mae: 0.1260 - val_loss: 0.0533 - val_mae: 0.1240
Epoch 19/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0438 - mae: 0.1254 - val_loss: 0.0534 - val_mae: 0.1243
Epoch 20/20
48/48 [==============================] - 1s 14ms/step - loss: 0.0433 - mae: 0.1249 - val_loss: 0.0536 - val_mae: 0.1247
In [ ]:
Seq_train = Model.predict(trainX, verbose = 1)
print(Seq_train[0], Seq_train.shape)

Seq_test = Model.predict(testX, verbose = 1)
print(Seq_test[0], Seq_test.shape)
105/105 [==============================] - 1s 5ms/step
[[0.21157733 0.07663964 0.1288548  ... 0.15159275 0.13180798 0.48404515]
 [0.23376498 0.07955229 0.12449098 ... 0.1428869  0.21562982 0.4037259 ]
 [0.22121972 0.06589307 0.10650204 ... 0.13127166 0.17974496 0.34546864]
 ...
 [0.5319785  0.0671313  0.14078385 ... 0.23216441 0.5241278  0.7352265 ]
 [0.27709356 0.07701583 0.10970547 ... 0.1964486  0.25530237 0.7959328 ]
 [0.15303259 0.09348851 0.08787327 ... 0.16913208 0.5531741  0.67253536]] (3350, 48, 299)
23/23 [==============================] - 0s 6ms/step
[[0.29723638 0.06338115 0.23943886 ... 0.24931385 0.35473067 0.8835342 ]
 [0.30044433 0.06122971 0.2084597  ... 0.2290062  0.44248822 0.70940334]
 [0.2949435  0.06806569 0.20767976 ... 0.26883107 0.507307   0.73140585]
 ...
 [0.1840106  0.06737056 0.33734038 ... 0.30317956 0.19517168 0.9487646 ]
 [0.19879572 0.07447573 0.32095826 ... 0.280676   0.6398721  1.0493051 ]
 [0.24690883 0.09858389 0.2532099  ... 0.22591019 0.6615903  1.0499836 ]] (730, 48, 299)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C1))
#print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY[:48,1,1], testingtrain_C2[:48,1,1]))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], Seq_test[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.25 RMSE
Test Score: 0.13 MAE
In [ ]:
aa=[x for x in range(200)]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:200,1,1], marker='.', label="actual")
plt.plot(aa, Seq_test[:200,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()
In [ ]:
model = Sequential()

model.add(LSTM(200, activation='relu', input_shape =(trainX.shape[1],trainX.shape[2]), return_sequences = True))
#model.add(Dense(507))

model.add(Dense(trainX.shape[2]))
#model.add(BatchNormalization())
#model.add(Dropout(.01))

#sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9),   'optimizers.Adam(lr=0.01)'
model.compile(optimizer = optimizers.Adam(lr=0.001), metrics = 'mae', loss='mse')
model.summary()
WARNING:tensorflow:Layer lstm_7 will not use cuDNN kernel since it doesn't meet the cuDNN kernel criteria. It will use generic GPU kernel as fallback when running on GPU
Model: "sequential_16"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_7 (LSTM)                (None, 48, 200)           400000    
_________________________________________________________________
dense_34 (Dense)             (None, 48, 299)           60099     
=================================================================
Total params: 460,099
Trainable params: 460,099
Non-trainable params: 0
_________________________________________________________________
In [ ]:
# fit model
history_SP = model.fit(trainX,trainY, epochs=20, validation_split = 0.10, batch_size = 32, callbacks=[lr_decay])
Epoch 1/20
95/95 [==============================] - 13s 135ms/step - loss: 0.0717 - mae: 0.1623 - val_loss: 0.0594 - val_mae: 0.1379
Epoch 2/20
95/95 [==============================] - 12s 125ms/step - loss: 0.0539 - mae: 0.1374 - val_loss: 0.0573 - val_mae: 0.1378
Epoch 3/20
95/95 [==============================] - 13s 132ms/step - loss: 0.0496 - mae: 0.1320 - val_loss: 0.0563 - val_mae: 0.1347
Epoch 4/20
95/95 [==============================] - 12s 130ms/step - loss: 0.0470 - mae: 0.1289 - val_loss: 0.0565 - val_mae: 0.1350
Epoch 5/20
95/95 [==============================] - 12s 128ms/step - loss: 0.0451 - mae: 0.1269 - val_loss: 0.0565 - val_mae: 0.1361
Epoch 6/20
95/95 [==============================] - 13s 135ms/step - loss: 0.0436 - mae: 0.1255 - val_loss: 0.0569 - val_mae: 0.1379
Epoch 7/20
95/95 [==============================] - 13s 138ms/step - loss: 0.0424 - mae: 0.1243 - val_loss: 0.0570 - val_mae: 0.1378
Epoch 8/20
95/95 [==============================] - 12s 131ms/step - loss: 0.0415 - mae: 0.1233 - val_loss: 0.0570 - val_mae: 0.1375
Epoch 9/20
95/95 [==============================] - 12s 127ms/step - loss: 0.0408 - mae: 0.1225 - val_loss: 0.0573 - val_mae: 0.1381
Epoch 10/20
95/95 [==============================] - 12s 132ms/step - loss: 0.0402 - mae: 0.1218 - val_loss: 0.0572 - val_mae: 0.1380
Epoch 11/20
95/95 [==============================] - 12s 128ms/step - loss: 0.0398 - mae: 0.1213 - val_loss: 0.0576 - val_mae: 0.1390
Epoch 12/20
95/95 [==============================] - 12s 128ms/step - loss: 0.0394 - mae: 0.1209 - val_loss: 0.0578 - val_mae: 0.1392
Epoch 13/20
95/95 [==============================] - 13s 139ms/step - loss: 0.0391 - mae: 0.1206 - val_loss: 0.0578 - val_mae: 0.1393
Epoch 14/20
95/95 [==============================] - 12s 128ms/step - loss: 0.0389 - mae: 0.1204 - val_loss: 0.0579 - val_mae: 0.1396
Epoch 15/20
95/95 [==============================] - 12s 132ms/step - loss: 0.0387 - mae: 0.1201 - val_loss: 0.0581 - val_mae: 0.1398
Epoch 16/20
95/95 [==============================] - 12s 128ms/step - loss: 0.0386 - mae: 0.1200 - val_loss: 0.0581 - val_mae: 0.1398
Epoch 17/20
95/95 [==============================] - 12s 129ms/step - loss: 0.0385 - mae: 0.1198 - val_loss: 0.0582 - val_mae: 0.1399
Epoch 18/20
95/95 [==============================] - 13s 134ms/step - loss: 0.0384 - mae: 0.1197 - val_loss: 0.0583 - val_mae: 0.1400
Epoch 19/20
95/95 [==============================] - 12s 130ms/step - loss: 0.0383 - mae: 0.1196 - val_loss: 0.0583 - val_mae: 0.1401
Epoch 20/20
95/95 [==============================] - 12s 128ms/step - loss: 0.0382 - mae: 0.1196 - val_loss: 0.0583 - val_mae: 0.1400
In [ ]:
fig = plt.figure(figsize=(5,3), dpi=75) #set figure size

plt.plot(history_SP.history['loss'], label='train')
plt.plot(history_SP.history['val_loss'], label='val')
plt.ylabel('Pérdida')
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [ ]:
testingtrain_SP = model.predict(trainX, verbose = 1)
print(testingtrain_SP[0], testingtrain_SP.shape)

testingtest_SP = model.predict(testX, verbose = 1)
print(testingtest_SP[0], testingtest_SP.shape)
105/105 [==============================] - 2s 16ms/step
[[0.32396245 0.13062379 0.12841722 ... 0.16460402 0.2505293  0.47673345]
 [0.41831386 0.11132635 0.07817035 ... 0.16623965 0.15779845 0.32983407]
 [0.42742923 0.03474426 0.03847646 ... 0.13822193 0.09482349 0.2701179 ]
 ...
 [0.6584061  0.11419828 0.08184513 ... 0.24418734 0.621966   0.42462358]
 [0.39063555 0.11505776 0.21242651 ... 0.16778912 0.36959907 0.6289051 ]
 [0.0374862  0.09735631 0.2185192  ... 0.10338404 0.38287595 0.6939864 ]] (3350, 48, 299)
23/23 [==============================] - 0s 14ms/step
[[ 0.30179435  0.08080224  0.24472313 ...  0.27480617  0.512003
   0.9795474 ]
 [ 0.308923    0.05623619  0.17748424 ...  0.22411004  0.3473581
   0.7749532 ]
 [ 0.35642052  0.07262318  0.18882017 ...  0.21609467  0.44790533
   0.6460345 ]
 ...
 [ 0.18572274  0.02184355  0.2754551  ...  0.35418454  0.26047873
   0.7582728 ]
 [ 0.3315298  -0.0776576   0.32775816 ...  0.33501384  0.6627519
   0.861183  ]
 [ 0.28569895 -0.00688521  0.30678147 ...  0.27785495  1.0457366
   0.96181047]] (730, 48, 299)
In [ ]:
#trainScore = math.sqrt(mean_squared_error(trainY_RMSE, testingtrain_C2))
#print('Train RMSE Score: %.2f' % (trainScore))
#testScore = math.sqrt(mean_squared_error(testY_RMSE, testingtest_C2))
#print('Test RMSE Score: %.2f' % (testScore))


testScore = math.sqrt(mean_squared_error(testY[:,1,:], testingtest_SP[:,1,:]))
print('Test Score: %.2f RMSE' % (testScore))

#trainMAE = np.mean(mae(trainY_RMSE, testingtrain_C2))
#print('Train Score: %.2f MAE' % (trainMAE))

testMAE = np.mean(mae(testY[:,1,:], testingtest_SP[:,1,:]))
print('Test Score: %.2f MAE' % (testMAE))
Test Score: 0.25 RMSE
Test Score: 0.14 MAE
In [ ]:
aa=[x for x in range(testY.shape[0])]
plt.figure(figsize=(20,5))
plt.plot(aa, testY[:,1,1], marker='.', label="actual")
plt.plot(aa, testingtest_SP[:,1,1], 'r', label="prediction")
plt.ylabel('Energy', size=15)
plt.xlabel('Time step', size=15)
plt.legend(fontsize=15)

plt.show()